/// <summary>
/// Serializes <paramref name="item"/> and stores it as a blob, creating the
/// container on demand. Returns false (without writing) when the blob already
/// exists and either overwrite is not requested or the etag precondition fails.
/// On success, <paramref name="etag"/> receives the blob's new etag.
/// </summary>
/// <exception cref="NotSupportedException">The container name violates Azure naming constraints.</exception>
bool PutBlob(string containerName, string blobName, object item, Type type, bool overwrite, string expectedEtag, out string etag, IDataSerializer serializer = null)
{
    var dataSerializer = serializer ?? DefaultSerializer;
    lock (_syncRoot)
    {
        etag = null;

        // Single lookup instead of repeated ContainsKey + indexer access.
        if (!Containers.TryGetValue(containerName, out var container))
        {
            // Containers are created lazily, but only with a valid name.
            if (!BlobStorageExtensions.IsContainerNameValid(containerName))
            {
                throw new NotSupportedException("the containerName is not compliant with azure constraints on container names");
            }
            container = new MemoryContainer();
            Containers.Add(containerName, container);
        }
        else if (container.BlobNames.Contains(blobName))
        {
            // Existing blob: replace only when overwrite is requested and the
            // optional etag precondition matches the current etag.
            if (!overwrite || (expectedEtag != null && expectedEtag != container.BlobsEtag[blobName]))
            {
                return false;
            }
            using (var stream = new MemoryStream())
            {
                dataSerializer.Serialize(item, stream, type);
                container.SetBlob(blobName, stream);
            }
            etag = container.BlobsEtag[blobName];
            return true;
        }

        // New blob, in a pre-existing or freshly created container.
        // (Note: as in the original logic, expectedEtag is not checked here.)
        using (var stream = new MemoryStream())
        {
            dataSerializer.Serialize(item, stream, type);
            container.AddBlob(blobName, stream);
        }
        etag = container.BlobsEtag[blobName];
        return true;
    }
}
/// <summary>
/// Stores the content of <paramref name="stream"/> as a blob, creating the
/// container on demand. Returns false (without writing) when the blob already
/// exists and either overwrite is not requested or the etag precondition fails.
/// On success, <paramref name="etag"/> receives the blob's new etag.
/// </summary>
/// <exception cref="NotSupportedException">The container name violates Azure naming constraints.</exception>
bool PutBlobStream(string containerName, string blobName, Stream stream, bool overwrite, string expectedEtag, out string etag)
{
    // Normalize the input to a rewound MemoryStream; non-memory streams are
    // copied so the container can retain the bytes.
    var memoryStream = stream as MemoryStream;
    if (memoryStream == null)
    {
        memoryStream = new MemoryStream();
        stream.CopyTo(memoryStream);
    }
    memoryStream.Position = 0;

    lock (_syncRoot)
    {
        etag = null;

        // Single lookup instead of repeated ContainsKey + indexer access.
        if (!Containers.TryGetValue(containerName, out var container))
        {
            // Containers are created lazily, but only with a valid name.
            if (!BlobStorageExtensions.IsContainerNameValid(containerName))
            {
                throw new NotSupportedException("the containerName is not compliant with azure constraints on container names");
            }
            container = new MemoryContainer();
            Containers.Add(containerName, container);
        }
        else if (container.BlobNames.Contains(blobName))
        {
            // Existing blob: replace only when overwrite is requested and the
            // optional etag precondition matches the current etag.
            if (!overwrite || (expectedEtag != null && expectedEtag != container.BlobsEtag[blobName]))
            {
                return false;
            }
            container.SetBlob(blobName, memoryStream);
            etag = container.BlobsEtag[blobName];
            return true;
        }

        // New blob, in a pre-existing or freshly created container.
        container.AddBlob(blobName, memoryStream);
        etag = container.BlobsEtag[blobName];
        return true;
    }
}
/// <summary>
/// Creates the named container if it does not already exist.
/// Returns true when a new container was created, false when it already existed.
/// </summary>
/// <exception cref="NotSupportedException">The container name violates Azure naming constraints.</exception>
public bool CreateContainerIfNotExist(string containerName)
{
    lock (_syncRoot)
    {
        if (!BlobStorageExtensions.IsContainerNameValid(containerName))
        {
            throw new NotSupportedException("the containerName is not compliant with azure constraints on container names");
        }
        // ContainsKey instead of Keys.Contains (direct key lookup, CA1841).
        if (Containers.ContainsKey(containerName))
        {
            return false;
        }
        Containers.Add(containerName, new MemoryContainer());
        return true;
    }
}
/// <summary>
/// E2E test: submits a job with one task whose OutputFiles spec uploads the
/// task's *.txt files to a writable-SAS blob container, then verifies the
/// expected blobs were uploaded under the configured prefix.
/// Requires live Batch and Storage accounts configured via the environment.
/// </summary>
public void RunTaskAndUploadFiles_FilesAreSuccessfullyUploaded()
{
    // NOTE(review): storing an async lambda in an Action makes it async void,
    // so exceptions thrown inside it are not observable through the delegate;
    // presumably SynchronizationContextHelper.RunTest installs a context that
    // captures them - TODO confirm.
    Action test = async() =>
    {
        string containerName = "runtaskanduploadfiles";

        // Build a blob client from the environment-provided storage credentials.
        StagingStorageAccount storageAccount = TestUtilities.GetStorageCredentialsFromEnvironment();
        CloudStorageAccount cloudStorageAccount = new CloudStorageAccount(
            new StorageCredentials(storageAccount.StorageAccount, storageAccount.StorageAccountKey),
            blobEndpoint: storageAccount.BlobUri,
            queueEndpoint: null,
            tableEndpoint: null,
            fileEndpoint: null);
        CloudBlobClient blobClient = cloudStorageAccount.CreateCloudBlobClient();

        using (BatchClient batchCli = TestUtilities.OpenBatchClient(TestUtilities.GetCredentialsFromEnvironment()))
        {
            string jobId = "RunTaskAndUploadFiles-" + TestUtilities.GetMyName();
            try
            {
                // Create container and writeable SAS
                var container = blobClient.GetContainerReference(containerName);
                await container.CreateIfNotExistsAsync();
                var sas = container.GetSharedAccessSignature(new SharedAccessBlobPolicy()
                {
                    Permissions = SharedAccessBlobPermissions.Write,
                    SharedAccessExpiryTime = DateTime.UtcNow.AddDays(1)
                });
                var fullSas = container.Uri + sas;

                CloudJob createJob = batchCli.JobOperations.CreateJob(jobId, new PoolInformation() { PoolId = this.poolFixture.PoolId });
                createJob.Commit();

                const string blobPrefix = "foo/bar";
                const string taskId = "simpletask";

                // Task writes go to <container>/<blobPrefix>/... on task completion.
                CloudTask unboundTask = new CloudTask(taskId, "echo test")
                {
                    OutputFiles = new List<OutputFile>
                    {
                        new OutputFile(
                            filePattern: @"../*.txt",
                            destination: new OutputFileDestination(new OutputFileBlobContainerDestination(fullSas, blobPrefix)),
                            uploadOptions: new OutputFileUploadOptions(uploadCondition: OutputFileUploadCondition.TaskCompletion))
                    }
                };
                batchCli.JobOperations.AddTask(jobId, unboundTask);

                var tasks = batchCli.JobOperations.ListTasks(jobId);
                var monitor = batchCli.Utilities.CreateTaskStateMonitor();
                monitor.WaitAll(tasks, TaskState.Completed, TimeSpan.FromMinutes(1));

                // Ensure that the correct files got uploaded
                var blobs = await BlobStorageExtensions.ListBlobs(container, useFlatBlobListing: true);
                blobs = blobs.ToList();
                Assert.Equal(4, blobs.Count()); //There are 4 .txt files created, stdout, stderr, fileuploadout, and fileuploaderr
                foreach (var blob in blobs)
                {
                    var blockBlob = blob as CloudBlockBlob;
                    Assert.StartsWith(blobPrefix, blockBlob.Name);
                }
            }
            finally
            {
                // Best-effort cleanup of the job and the test container.
                TestUtilities.DeleteJobIfExistsAsync(batchCli, jobId).Wait();
                var container = blobClient.GetContainerReference(containerName);
                await container.DeleteIfExistsAsync();
            }
        }
    };
    SynchronizationContextHelper.RunTest(test, TestTimeout);
}
/// <summary>
/// E2E test: requests upload of a compute node's Batch service logs to a
/// writable-SAS blob container and verifies that files eventually appear there.
/// Requires live Batch and Storage accounts configured via the environment.
/// </summary>
public async Task ComputeNodeUploadLogs()
{
    // NOTE(review): storing an async lambda in an Action makes it async void;
    // presumably SynchronizationContextHelper.RunTest installs a context that
    // observes its completion/exceptions - TODO confirm.
    Action test = async() =>
    {
        // Await instead of blocking on .Result: blocking inside an async lambda
        // risks deadlocking on the test synchronization context.
        using (BatchClient batchCli = await TestUtilities.OpenBatchClientFromEnvironmentAsync())
        {
            var node = batchCli.PoolOperations.ListComputeNodes(this.poolFixture.PoolId).First();

            // Generate a storage container URL
            StagingStorageAccount storageAccount = TestUtilities.GetStorageCredentialsFromEnvironment();
            CloudStorageAccount cloudStorageAccount = new CloudStorageAccount(
                new StorageCredentials(storageAccount.StorageAccount, storageAccount.StorageAccountKey),
                blobEndpoint: storageAccount.BlobUri,
                queueEndpoint: null,
                tableEndpoint: null,
                fileEndpoint: null);
            CloudBlobClient blobClient = cloudStorageAccount.CreateCloudBlobClient();
            const string containerName = "computenodelogscontainer";
            var container = blobClient.GetContainerReference(containerName);
            try
            {
                await container.CreateIfNotExistsAsync();
                var blobs = await BlobStorageExtensions.ListBlobs(container);

                // Ensure that there are no items in the container to begin with
                Assert.Empty(blobs);

                var sas = container.GetSharedAccessSignature(new SharedAccessBlobPolicy()
                {
                    Permissions = SharedAccessBlobPermissions.Write,
                    SharedAccessExpiryTime = DateTime.UtcNow.AddDays(1)
                });
                var fullSas = container.Uri + sas;

                var startTime = DateTime.UtcNow.Subtract(TimeSpan.FromMinutes(5));
                var result = batchCli.PoolOperations.UploadComputeNodeBatchServiceLogs(
                    this.poolFixture.PoolId,
                    node.Id,
                    fullSas,
                    startTime);

                Assert.NotEqual(0, result.NumberOfFilesUploaded);
                Assert.NotEmpty(result.VirtualDirectoryName);

                // Allow up to 2m for files to get uploaded; poll with a short
                // delay instead of spinning back-to-back list calls.
                DateTime timeoutAt = DateTime.UtcNow.AddMinutes(2);
                while (DateTime.UtcNow < timeoutAt)
                {
                    blobs = await BlobStorageExtensions.ListBlobs(container);
                    if (blobs.Any())
                    {
                        break;
                    }
                    await Task.Delay(TimeSpan.FromSeconds(5));
                }

                Assert.NotEmpty(blobs);
            }
            finally
            {
                // Best-effort cleanup of the test container.
                await container.DeleteIfExistsAsync();
            }
        }
    };
    SynchronizationContextHelper.RunTest(test, TestTimeout);
}