Esempio n. 1
0
        public void SetUp()
        {
            // Azure Blob Storage container names are limited to 63 characters
            // and may contain only lowercase letters, digits, and dashes.
            const int ContainerNameMaxLength = 63;

            // Derive a per-test container name from the running test's method
            // name, normalized to satisfy Azure naming rules.
            var name
                = "test-"
                  + TestContext.CurrentContext.Test.MethodName
                  .ToLowerInvariant()
                  .Replace('_', '-');

            if (name.Length > ContainerNameMaxLength)
            {
                name = name.Substring(0, ContainerNameMaxLength);
            }

            // Truncation may leave a trailing '-', which Azure rejects
            // (container names must end with a letter or number) — trim it.
            name = name.TrimEnd('-');

            Configuration = new AzureBlobStorageConfiguration
            {
                ConnectionString = "UseDevelopmentStorage=true",
                ContainerName    = name,
            };

            Container = new BlobContainerClient(
                Configuration.ConnectionString,
                Configuration.ContainerName
                );

            // Start each test from a clean, freshly created container.
            Container.DeleteIfExists();
            Container.Create();
        }
Esempio n. 2
0
        /// <summary>
        /// Verifies that UploadComputeNodeBatchServiceLogs uploads at least one
        /// service log file from a pool's compute node into a SAS-addressed
        /// blob container.
        /// </summary>
        public async Task ComputeNodeUploadLogs()
        {
            // The Batch operations used here are synchronous, so the work runs
            // in a sync local function driven by the test helper below.
            void test()
            {
                using BatchClient batchCli = TestUtilities.OpenBatchClientFromEnvironmentAsync().Result;
                const string containerName = "computenodelogscontainer";

                // Generate a storage container URL
                StagingStorageAccount storageAccount  = TestUtilities.GetStorageCredentialsFromEnvironment();
                BlobServiceClient     blobClient      = BlobUtilities.GetBlobServiceClient(storageAccount);
                BlobContainerClient   containerClient = BlobUtilities.GetBlobContainerClient(containerName, blobClient, storageAccount);

                try
                {
                    containerClient.CreateIfNotExists();
                    string sasUri = BlobUtilities.GetWriteableSasUri(containerClient, storageAccount);

                    var blobs = containerClient.GetAllBlobs();

                    // Ensure that there are no items in the container to begin with
                    Assert.Empty(blobs);

                    // Request logs generated within the last five minutes.
                    var startTime = DateTime.UtcNow.Subtract(TimeSpan.FromMinutes(5));

                    var node   = batchCli.PoolOperations.ListComputeNodes(poolFixture.PoolId).First();
                    var result = batchCli.PoolOperations.UploadComputeNodeBatchServiceLogs(
                        poolFixture.PoolId,
                        node.Id,
                        sasUri,
                        startTime);

                    Assert.NotEqual(0, result.NumberOfFilesUploaded);
                    Assert.NotEmpty(result.VirtualDirectoryName);

                    // Allow up to 2m for files to get uploaded
                    DateTime timeoutAt = DateTime.UtcNow.AddMinutes(2);
                    while (DateTime.UtcNow < timeoutAt)
                    {
                        blobs = containerClient.GetAllBlobs();
                        if (blobs.Any())
                        {
                            break;
                        }

                        // Back off between polls rather than hammering the
                        // storage service in a tight loop for up to 2 minutes.
                        System.Threading.Thread.Sleep(TimeSpan.FromSeconds(1));
                    }

                    Assert.NotEmpty(blobs);
                }
                finally
                {
                    // Best-effort cleanup of the test container.
                    containerClient.DeleteIfExists();
                }
            }

            SynchronizationContextHelper.RunTest(test, TestTimeout);
        }
        public void SetUp()
        {
            // Capture listener log output so tests can inspect emitted entries.
            _loggerProvider = new TestLoggerProvider();
            var factory = new LoggerFactory();
            factory.AddProvider(_loggerProvider);
            _logger = factory.CreateLogger<BlobListener>();

            // Point at the shared Azurite emulator and recreate the test
            // container so every test starts from an empty state.
            _blobServiceClient = AzuriteNUnitFixture.Instance.GetBlobServiceClient();
            _blobContainer = _blobServiceClient.GetBlobContainerClient(TestContainerName);
            _blobContainer.DeleteIfExists();
            _blobContainer.CreateIfNotExists();
        }
Esempio n. 4
0
        public BlobQueueTriggerExecutorTests(AzuriteFixture azuriteFixture)
        {
            // Route listener logs through the test logger provider.
            // NOTE(review): _loggerProvider is not assigned here — presumably a
            // field initializer elsewhere in the class; confirm.
            var factory = new LoggerFactory();
            factory.AddProvider(_loggerProvider);
            _logger = factory.CreateLogger<BlobListener>();

            // Resolve the Azurite-backed account and recreate the container so
            // each test class instance starts from an empty state.
            var account = azuriteFixture.GetAccount();
            blobServiceClient = account.CreateBlobServiceClient();
            blobContainer = blobServiceClient.GetBlobContainerClient(TestContainerName);
            blobContainer.DeleteIfExists();
            blobContainer.CreateIfNotExists();
        }
        public override IDisposable? PrepareForHandleLost()
        {
            // Shorten the renewal cadence so a lost handle is detected quickly.
            this.Options = options =>
            {
                DefaultTestingOptions(options);
                options.RenewalCadence(TimeSpan.FromMilliseconds(10));
            };

            // Derive a deterministic per-test container name; hashing the
            // framework + test name keeps it within Azure's length limit.
            using var md5 = MD5.Create();
            var nameSeed = Encoding.UTF8.GetBytes(TargetFramework.Current + TestContext.CurrentContext.Test.FullName);
            this.ContainerName = $"distributed-lock-handle-lost-{new BigInteger(md5.ComputeHash(nameSeed)):x}";

            var containerClient = new BlobContainerClient(AzureCredentials.ConnectionString, this.ContainerName);
            containerClient.CreateIfNotExists();

            // Defer container deletion until the fixture disposes.
            this._disposables.Add(() => containerClient.DeleteIfExists());

            return new HandleLostScope(this.ContainerName);
        }
Esempio n. 6
0
        public void TestCanAcquireIfContainerLeased()
        {
            using var provider = new TestingAzureBlobLeaseDistributedLockProvider();
            // Framework-specific container name so parallel target-framework
            // runs don't collide on the same container.
            provider.Strategy.ContainerName = "leased-container" + TargetFramework.Current.Replace('.', '-');

            var container = new BlobContainerClient(AzureCredentials.ConnectionString, provider.Strategy.ContainerName);
            var containerLease = new BlobLeaseClient(container);

            try
            {
                container.CreateIfNotExists();

                // Lease the container itself; acquiring the distributed lock
                // should still succeed despite the container-level lease.
                containerLease.Acquire(TimeSpan.FromSeconds(60));

                var @lock = provider.CreateLock(nameof(TestCanAcquireIfContainerLeased));
                using var handle = @lock.TryAcquire();
                Assert.IsNotNull(handle);
            }
            finally
            {
                // Release the lease first, then delete the container regardless.
                try { containerLease.Release(); }
                finally { container.DeleteIfExists(); }
            }
        }
Esempio n. 7
0
 // Remove the per-test container created during setup.
 public void TearDown() => Container.DeleteIfExists();
        /// <summary>
        /// End-to-end check: a task configured with OutputFiles uploads its
        /// matching *.txt files (with a custom content-type header) to a blob
        /// container when the task completes.
        /// </summary>
        public async Task RunTaskAndUploadFiles_FilesAreSuccessfullyUploaded()
        {
            async Task test()
            {
                using BatchClient batchCli = TestUtilities.OpenBatchClient(TestUtilities.GetCredentialsFromEnvironment());
                string jobId         = "RunTaskAndUploadFiles-" + TestUtilities.GetMyName();
                string containerName = "runtaskanduploadfiles";
                StagingStorageAccount storageAccount  = TestUtilities.GetStorageCredentialsFromEnvironment();
                BlobServiceClient     blobClient      = BlobUtilities.GetBlobServiceClient(storageAccount);
                BlobContainerClient   containerClient = BlobUtilities.GetBlobContainerClient(containerName, blobClient, storageAccount);

                try
                {
                    // Create container and writeable SAS
                    containerClient.CreateIfNotExists();
                    string sasUri = BlobUtilities.GetWriteableSasUri(containerClient, storageAccount);

                    CloudJob createJob = batchCli.JobOperations.CreateJob(jobId, new PoolInformation {
                        PoolId = poolFixture.PoolId
                    });
                    createJob.Commit();

                    const string blobPrefix = "foo/bar";
                    const string taskId     = "simpletask";

                    // Upload under a virtual-directory prefix and tag each blob
                    // with a custom content type to verify header plumbing.
                    OutputFileBlobContainerDestination containerDestination = new OutputFileBlobContainerDestination(sasUri, blobPrefix);
                    containerDestination.UploadHeaders = new List<HttpHeader> {
                        new HttpHeader("x-ms-blob-content-type", "test-type")
                    };

                    OutputFileDestination   destination   = new OutputFileDestination(containerDestination);
                    OutputFileUploadOptions uploadOptions = new OutputFileUploadOptions(uploadCondition: OutputFileUploadCondition.TaskCompletion);
                    CloudTask unboundTask = new CloudTask(taskId, "echo test")
                    {
                        OutputFiles = new List<OutputFile>
                        {
                            new OutputFile(@"../*.txt", destination, uploadOptions)
                        }
                    };

                    batchCli.JobOperations.AddTask(jobId, unboundTask);

                    IPagedEnumerable<CloudTask> tasks = batchCli.JobOperations.ListTasks(jobId);

                    TaskStateMonitor monitor = batchCli.Utilities.CreateTaskStateMonitor();
                    monitor.WaitAll(tasks, TaskState.Completed, TimeSpan.FromMinutes(1));

                    // Ensure that the correct files got uploaded
                    List<BlobItem> blobs = containerClient.GetAllBlobs();
                    // Use the List Count property, not the LINQ Count() extension.
                    Assert.Equal(4, blobs.Count); //There are 4 .txt files created, stdout, stderr, fileuploadout, and fileuploaderr
                    foreach (BlobItem blob in blobs)
                    {
                        Assert.StartsWith(blobPrefix, blob.Name);
                        Assert.Equal("test-type", blob.Properties.ContentType); // Ensure test Upload header was applied to blob.
                    }
                }
                finally
                {
                    // Clean up the job and the container even if assertions failed.
                    await TestUtilities.DeleteJobIfExistsAsync(batchCli, jobId).ConfigureAwait(false);

                    containerClient.DeleteIfExists();
                }
            }

            await SynchronizationContextHelper.RunTestAsync(test, TestTimeout);
        }
Esempio n. 9
0
        // *************************************************************************************************************************
        // Instructions: This sample can be run using either the Azure storage emulator that installs as part of the Azure SDK - or by
        // updating the App.Config file with your AccountName and Key.
        //
        // To run the sample using the storage emulator (default option)
        //      1. Start the Azure storage emulator (once only) by pressing the Start button or the Windows key and searching for it
        //         by typing "Azure storage emulator". Select it from the list of applications to start it.
        //      2. Set breakpoints and run the project using F10.
        //
        // To run the sample using a storage account
        //      1. Open the app.config file and comment out the connection string for the emulator (UseDevelopmentStorage=True) and
        //         uncomment the connection string for the storage service (AccountName=[]...)
        //      2. Create a storage account through the Azure Portal and provide your [AccountName] and [AccountKey] in
        //         the App.Config file. See http://go.microsoft.com/fwlink/?LinkId=325277 for more information
        //      3. Set breakpoints and run the project using F10.
        //
        // *************************************************************************************************************************
        /// <summary>
        /// Demonstrates four shared access signature (SAS) scenarios against a
        /// freshly created container: a container SAS with limited (write/list)
        /// permissions, a container SAS backed by a stored access policy, a
        /// blob SAS with limited permissions, and a blob SAS with full
        /// permissions. Each SAS is exercised by a Test* helper defined
        /// elsewhere in this file.
        /// </summary>
        static void Main()
        {
            const string containerPrefix = "sas-container-";
            const string policyPrefix    = "tutorial-policy-";

            const string blobName1    = "sasBlob1.txt";
            const string blobContent1 = "Blob created with an container SAS with store access policy granting write and list permissions on the container.";

            const string blobName2    = "sasBlob2.txt";
            const string blobContent2 = "Blob created with an container SAS granting all permissions on the container.";

            const string blobName3    = "sasBlob3.txt";
            const string blobContent3 = "Blob created with a blob SAS with store access policy granting create/write permissions to the blob.";

            const string blobName4    = "sasBlob4.txt";
            const string blobContent4 = "Blob created with a blob SAS granting all permissions to the blob.";

            // Tick-suffixed names make each run use a fresh container/policy.
            string containerName         = containerPrefix + DateTime.Now.Ticks.ToString();
            string storeAccessPolicyName = policyPrefix + DateTime.Now.Ticks.ToString();

            //Parse the connection string and return a reference to the storage account.
            BlobServiceClient blobServiceClient = new BlobServiceClient(ConfigurationManager.AppSettings.Get("StorageConnectionString"));

            //Get a reference to a container to use for the sample code, and create it if it does not exist.
            BlobContainerClient container = blobServiceClient.GetBlobContainerClient(containerName);

            try
            {
                container.CreateIfNotExists();
            }
            catch (RequestFailedException)
            {
                // Ensure that the storage emulator is running if using emulator connection string.
                Console.WriteLine("If you are running with the default connection string, please make sure you have started the storage emulator. Press the Windows key and type Azure Storage to select and run it from the list of applications - then restart the sample.");
                Console.ReadLine();
                throw;
            }

            //Create a new access policy on the container, which may be optionally used to provide constraints for
            //shared access signatures on the container and the blob.
            //The access policy provides create, write, read, list, and delete permissions.
            // NOTE(review): this credential reads the emulator account key from
            // config — presumably only valid for the emulator path; verify for
            // real storage accounts.
            StorageSharedKeyCredential storageSharedKeyCredential = new StorageSharedKeyCredential(blobServiceClient.AccountName, ConfigurationManager.AppSettings.Get("AzureStorageEmulatorAccountKey"));

            CreateStoreAccessPolicy(container, storeAccessPolicyName);

            //Generate an SAS URI for the container. The SAS has write and list permissions.
            Uri containerSAS = container.GenerateSasUri(BlobContainerSasPermissions.Write | BlobContainerSasPermissions.List, DateTimeOffset.UtcNow.AddHours(1));

            Console.WriteLine("1. SAS for blob container : " + containerSAS);
            Console.WriteLine();

            //Test the SAS to ensure it works as expected.
            //The write and list operations should succeed, and the read and delete operations should fail.
            TestContainerSAS(containerSAS, blobName1, blobContent1);
            Console.WriteLine();

            //Generate an SAS URI for the container. The SAS has all permissions.
            UriBuilder storeContainerSAS = GetContainerSasUri(container, storageSharedKeyCredential);

            Console.WriteLine("2. SAS for blob container : " + storeContainerSAS);
            Console.WriteLine();

            //Test the SAS to ensure it works as expected.
            //The write, read, list, and delete operations should all succeed.
            TestContainerSAS(storeContainerSAS.Uri, blobName2, blobContent2);
            Console.WriteLine();

            //Generate an SAS URI for a blob within the container. The SAS has create, write, and read permissions.
            Uri storeBlobSAS = container.GetBlobClient(blobName3).GenerateSasUri(BlobSasPermissions.Create | BlobSasPermissions.Write | BlobSasPermissions.Read, DateTimeOffset.UtcNow.AddHours(1));

            Console.WriteLine("3. SAS for blob : " + storeBlobSAS);
            Console.WriteLine();

            //Test the SAS to ensure it works as expected.
            //The create, write, and read operations should succeed, and the delete operation should fail.
            TestBlobSAS(storeBlobSAS, blobContent3);
            Console.WriteLine();

            //Generate an SAS URI for a blob within the container. The SAS has all permissions.
            Uri blobSAS = GetBlobSasUri(container, blobName4);

            Console.WriteLine("4. SAS for blob : " + blobSAS);
            Console.WriteLine();

            //Test the SAS to ensure it works as expected.
            //The create, write, read, and delete operations should all succeed.
            TestBlobSAS(blobSAS, blobContent4);
            Console.WriteLine();

            //Delete the container to clean up.
            container.DeleteIfExists();

            Console.ReadLine();
        }
        /// <summary>
        /// Runs the sync host twice with two different end times to verify
        /// incremental processing: the first run syncs only resources imported
        /// before the earlier end time, the second picks up the remainder, and
        /// the combined resource counts match the full expected result.
        /// </summary>
        public async Task GivenTwoEndTimes_WhenProcessIncrementalData_CorrectResultShouldBeReturnedAsync()
        {
            Skip.If(_blobServiceClient == null);
            var uniqueContainerName = Guid.NewGuid().ToString("N");
            BlobContainerClient blobContainerClient = _blobServiceClient.GetBlobContainerClient(uniqueContainerName);

            // Make sure the container is deleted before running the tests
            Assert.False(await blobContainerClient.ExistsAsync());

            // Load configuration
            Environment.SetEnvironmentVariable("job:containerName", uniqueContainerName);
            Environment.SetEnvironmentVariable("filter:filterScope", "Group");
            Environment.SetEnvironmentVariable("filter:requiredTypes", "Condition,MedicationRequest,Patient");
            Environment.SetEnvironmentVariable("filter:typeFilters", "MedicationRequest?status=active,MedicationRequest?status=completed&date=gt2018-07-01T00:00:00Z");

            // this group includes all the 80 patients
            Environment.SetEnvironmentVariable("filter:groupId", "72d653ce-2dbb-4432-bfa0-9ac47d0e0a2c");

            var configuration = new ConfigurationBuilder()
                                .AddJsonFile(TestConfigurationPath)
                                .AddEnvironmentVariables()
                                .Build();

            // set end time to the time that not all the resources are imported.
            configuration.GetSection(ConfigurationConstants.JobConfigurationKey)["endTime"] = "2022-06-29T16:00:00.000Z";

            try
            {
                // trigger first time, only the resources imported before end time are synced.
                var host_1 = CreateHostBuilder(configuration).Build();
                await host_1.RunAsync();

                // Check job status
                var fileName    = Path.Combine(_expectedDataFolder, "GroupScope_AllPatient_Filters_part1.json");
                var expectedJob = JsonConvert.DeserializeObject <Job>(File.ReadAllText(fileName));

                await CheckJobStatus(blobContainerClient, expectedJob);

                // modify the job end time to fake incremental sync.
                // the second triggered job should sync the other resources
                configuration.GetSection(ConfigurationConstants.JobConfigurationKey)["endTime"] =
                    "2022-07-01T00:00:00.000Z";

                var host_2 = CreateHostBuilder(configuration).Build();
                await host_2.RunAsync();

                var completedJobCount = 0;
                Dictionary <string, int> totalResourceCount = new Dictionary <string, int>();
                await foreach (var blobItem in blobContainerClient.GetBlobsAsync(prefix: "jobs/completedJobs"))
                {
                    _testOutputHelper.WriteLine($"Queried blob {blobItem.Name}.");

                    if (blobItem.Name.EndsWith(".json"))
                    {
                        completedJobCount++;
                        var blobClient       = blobContainerClient.GetBlobClient(blobItem.Name);
                        var blobDownloadInfo = await blobClient.DownloadAsync();

                        using var reader = new StreamReader(blobDownloadInfo.Value.Content, Encoding.UTF8);
                        var completedJob = JsonConvert.DeserializeObject <Job>(await reader.ReadToEndAsync());

                        Assert.Equal(JobStatus.Succeeded, completedJob.Status);

                        // Accumulate counts across both completed jobs.
                        totalResourceCount =
                            totalResourceCount.ConcatDictionaryCount(completedJob.TotalResourceCounts);

                        // Check parquet files
                        // NOTE(review): these container-wide checks run once per
                        // completed job; they are idempotent but could be hoisted
                        // outside the loop.
                        Assert.Equal(1, await GetResultFileCount(blobContainerClient, "result/Patient/2022/06/29"));
                        Assert.Equal(1, await GetResultFileCount(blobContainerClient, "result/Condition/2022/06/29"));
                        Assert.Equal(1, await GetResultFileCount(blobContainerClient, "result/Condition/2022/07/01"));
                        Assert.Equal(1, await GetResultFileCount(blobContainerClient, "result/MedicationRequest/2022/06/29"));
                        Assert.Equal(1, await GetResultFileCount(blobContainerClient, "result/MedicationRequest/2022/07/01"));

                        var schedulerMetadata = await GetSchedulerMetadata(blobContainerClient);

                        Assert.Empty(schedulerMetadata.FailedJobs);
                        Assert.Equal(80, schedulerMetadata.ProcessedPatients.Count());
                    }
                }

                // there should be two completed jobs
                Assert.Equal(2, completedJobCount);

                var allResourceFileName = Path.Combine(_expectedDataFolder, "GroupScope_AllPatient_Filters.json");
                var allResourceJob      = JsonConvert.DeserializeObject <Job>(File.ReadAllText(allResourceFileName));

                // the total resource count of these two job should equal to all the resources count
                Assert.True(DictionaryEquals(allResourceJob.TotalResourceCounts, totalResourceCount));
            }
            finally
            {
                _testOutputHelper.WriteLine("Dispose.");

                // Use the async delete — blocking on the sync overload inside an
                // async method risks thread-pool starvation/deadlock.
                await blobContainerClient.DeleteIfExistsAsync();
            }
        }