public async Task IfAFileWithAMultiLevelPathIsSaved_ThenItAppearsInTheList()
{
    // A backslash-separated relative path should be stored under forward-slash
    // blob "virtual directories" and show up in the listing for the kind.
    var jobOutputStorage = new JobOutputStorage(StorageAccount, _jobId);
    await jobOutputStorage.SaveAsyncImpl(JobOutputKind.JobOutput, FileBase, "File\\Under\\TestText2.txt");

    var blobs = jobOutputStorage.ListOutputs(JobOutputKind.JobOutput).ToList();

    // NotEmpty instead of NotEqual(0, Count) — xUnit idiom (xUnit2013) and
    // consistent with the sibling list tests in this file.
    Assert.NotEmpty(blobs);
    Assert.Contains(blobs, b => b.Uri.AbsoluteUri.EndsWith($"{_jobId}/$JobOutput/File/Under/TestText2.txt"));
}
public async Task IfAFileIsSaved_ThenItAppearsInTheList()
{
    // Arrange: output storage scoped to the current test job.
    var storage = new JobOutputStorage(StorageAccount, _jobId);

    // Act: save a file by relative path, then list the job's outputs.
    await storage.SaveAsyncImpl(JobOutputKind.JobOutput, FileBase, "TestText1.txt");
    var listed = storage.ListOutputs(JobOutputKind.JobOutput).ToList();

    // Assert: the saved blob appears under {jobId}/$JobOutput/.
    Assert.NotEmpty(listed);
    Assert.Contains(listed, blob => blob.Uri.AbsoluteUri.EndsWith($"{_jobId}/$JobOutput/TestText1.txt"));
}
public async Task IfAFileIsSavedWithAnExplicitPath_ThenItAppearsInTheList()
{
    // Saving with an explicit destination path should store the blob under the
    // supplied name rather than the source file name.
    var jobOutputStorage = new JobOutputStorage(StorageAccount, _jobId);
    await jobOutputStorage.SaveAsync(JobOutputKind.JobOutput, FilePath("TestText1.txt"), "RenamedTestText1.txt");

    var blobs = jobOutputStorage.ListOutputs(JobOutputKind.JobOutput).ToList();

    // NotEmpty instead of NotEqual(0, Count) — xUnit idiom (xUnit2013) and
    // consistent with the sibling list tests in this file.
    Assert.NotEmpty(blobs);
    Assert.Contains(blobs, b => b.Uri.AbsoluteUri.EndsWith($"{_jobId}/$JobOutput/RenamedTestText1.txt"));
}
public async Task IfAFileIsSavedWithAnExplicitMultiLevelPath_ThenItAppearsInTheList()
{
    // Arrange: output storage scoped to the current test job.
    var storage = new JobOutputStorage(StorageAccount, _jobId);

    // Act: save under an explicit multi-level destination path, then list.
    await storage.SaveAsync(JobOutputKind.JobOutput, FilePath("TestText1.txt"), "File/In/The/Depths/TestText3.txt");
    var listed = storage.ListOutputs(JobOutputKind.JobOutput).ToList();

    // Assert: the nested path is preserved under {jobId}/$JobOutput/.
    Assert.NotEmpty(listed);
    Assert.Contains(listed, blob => blob.Uri.AbsoluteUri.EndsWith($"{_jobId}/$JobOutput/File/In/The/Depths/TestText3.txt"));
}
public async Task IfARetryPolicyIsSpecifiedInTheStorageAccountConstructor_ThenItIsUsed()
{
    // Arrange: construct output storage with an explicit linear retry policy.
    var storage = new JobOutputStorage(StorageAccount, _jobId, new LinearRetry(TimeSpan.FromSeconds(5), 4));

    // Act: round-trip a file and inspect the storage client that served it.
    await storage.SaveAsync(JobOutputKind.JobOutput, FilePath("TestText1.txt"), "SavedWithLinearRetry1.txt");
    var output = await storage.GetOutputAsync(JobOutputKind.JobOutput, "SavedWithLinearRetry1.txt");
    var serviceClient = output.CloudBlob.ServiceClient;

    // Assert: the retry policy given to the constructor is what the client uses.
    Assert.IsType<LinearRetry>(serviceClient.DefaultRequestOptions.RetryPolicy);
}
public async Task IfAFileIsSavedWithAMultiLevelPath_ThenItCanBeGot()
{
    // Arrange: output storage scoped to the current test job.
    var storage = new JobOutputStorage(StorageAccount, _jobId);

    // Act: save under a nested path, then fetch it back by that same path.
    await storage.SaveAsync(JobOutputKind.JobOutput, FilePath("TestText1.txt"), "This/File/Is/Gettable.txt");
    var fetched = await storage.GetOutputAsync(JobOutputKind.JobOutput, "This/File/Is/Gettable.txt");
    var fetchedBytes = await fetched.ReadAsByteArrayAsync();

    // Assert: the downloaded content is byte-for-byte the original file.
    var expectedBytes = File.ReadAllBytes(FilePath("TestText1.txt"));
    Assert.Equal(expectedBytes, fetchedBytes);
}
public void GetJobOutputStoragePathReturnsExpectedValue()
{
    // Each output kind maps to a "$<kind>/" prefix within the job's container.
    var jobStorage = new JobOutputStorage(new Uri("http://example.test/"));

    // Redundant .ToString() calls removed: string interpolation already
    // invokes ToString() on the interpolated value (behavior unchanged).
    var taskLogPath = jobStorage.GetOutputStoragePath(JobOutputKind.JobOutput);
    Assert.Equal($"${JobOutputKind.JobOutput}/", taskLogPath);

    taskLogPath = jobStorage.GetOutputStoragePath(JobOutputKind.JobPreview);
    Assert.Equal($"${JobOutputKind.JobPreview}/", taskLogPath);

    // Custom kinds follow the same "$<name>/" convention.
    taskLogPath = jobStorage.GetOutputStoragePath(JobOutputKind.Custom("foo"));
    Assert.Equal($"${JobOutputKind.Custom("foo")}/", taskLogPath);
}
public async Task IfAFileIsSaved_UsingThePublicMethod_ThenTheCurrentDirectoryIsInferred()
{
    // To avoid needing to mess with the process working directory, relative path tests
    // normally go through the internal SaveAsyncImpl method. This test verifies that
    // the public SaveAsync method forwards the appropriate directory to SaveAsyncImpl.
    Assert.True(File.Exists(FilePath("TestText1.txt")), "Current directory is not what was expected - cannot verify current directory inference");

    var jobOutputStorage = new JobOutputStorage(StorageAccount, _jobId);
    await jobOutputStorage.SaveAsync(JobOutputKind.JobOutput, FilePath("TestText1.txt"));

    var blobs = jobOutputStorage.ListOutputs(JobOutputKind.JobOutput).ToList();

    // NotEmpty instead of NotEqual(0, Count) — xUnit idiom (xUnit2013) and
    // consistent with the sibling list tests in this file.
    Assert.NotEmpty(blobs);
    Assert.Contains(blobs, b => b.Uri.AbsoluteUri.EndsWith($"{_jobId}/$JobOutput/Files/TestText1.txt"));
}
public async Task IfARetryPolicyIsSpecifiedInTheContainerUrlConstructor_ThenItIsUsed()
{
    using (var batchClient = BatchClient.Open(new FakeBatchServiceClient()))
    {
        // Arrange: build output storage from a SAS container URL plus an
        // explicit linear retry policy.
        var job = batchClient.JobOperations.CreateJob(_jobId, null);
        var containerUrl = job.GetOutputStorageContainerUrl(StorageAccount, TimeSpan.FromMinutes(2));
        var storage = new JobOutputStorage(new Uri(containerUrl), new LinearRetry(TimeSpan.FromSeconds(5), 4));

        // Act: round-trip a file and inspect the storage client that served it.
        await storage.SaveAsync(JobOutputKind.JobOutput, FilePath("TestText1.txt"), "SavedWithLinearRetry2.txt");
        var output = await storage.GetOutputAsync(JobOutputKind.JobOutput, "SavedWithLinearRetry2.txt");
        var serviceClient = output.CloudBlob.ServiceClient;

        // Assert: the retry policy given to the constructor is what the client uses.
        Assert.IsType<LinearRetry>(serviceClient.DefaultRequestOptions.RetryPolicy);
    }
}
public async Task CloudJobGetStorageContainerUrlExtensionSasPermitsWritingToJobOutputContainer()
{
    using (var batchClient = BatchClient.Open(new FakeBatchServiceClient()))
    {
        var job = batchClient.JobOperations.CreateJob(_jobId, null);
        var url = job.GetOutputStorageContainerUrl(StorageAccount, TimeSpan.FromMinutes(5));

        // Write something using the SAS URL
        var jobOutputStorageFromUrl = new JobOutputStorage(new Uri(url));
        await jobOutputStorageFromUrl.SaveAsync(JobOutputKind.JobPreview, FilePath("TestText1.txt"), "SavedViaSas.txt");

        // And retrieve that same thing using the account credentials to verify
        // it was successfully written (and to the correct place)
        var jobOutputStorageFromAccount = job.OutputStorage(StorageAccount);
        var blobs = jobOutputStorageFromAccount.ListOutputs(JobOutputKind.JobPreview).ToList();

        // NotEmpty instead of NotEqual(0, Count) — xUnit idiom (xUnit2013) and
        // consistent with the sibling list tests in this file.
        Assert.NotEmpty(blobs);
        Assert.Contains(blobs, b => b.Uri.AbsoluteUri.EndsWith($"{_jobId}/$JobPreview/SavedViaSas.txt"));
    }
}
/// <summary>
/// Entry point for the Batch task that imports or exports an Azure SQL database
/// using sqlpackage.exe from an application package.
/// Args (when present): action, logical server name, database name, access token,
/// application package name, application package version.
/// Returns the sqlpackage.exe exit code.
/// </summary>
public static async Task<int> Main(string[] args)
{
    var assembly = typeof(Program).Assembly;
    WriteLine($"{assembly.ManifestModule.Name} v{assembly.GetName().Version.ToString(3)}");

    // Get the command payload from the task command line, when supplied.
    var payload = new Payload();
    if (args.Length > 0)
    {
        payload.Action = (ActionType)Enum.Parse(typeof(ActionType), args[0]);
        payload.LogicalServerName = args[1] + ".database.windows.net";
        payload.DatabaseName = args[2];
        payload.AccessToken = args[3];
        payload.ApplicatonPackageName = args[4];
        payload.ApplicatonPackageVersion = args[5];
    }

    // Start from clean working folders.
    foreach (string dir in directories)
    {
        if (Directory.Exists(dir))
        {
            Directory.Delete(dir, true);
        }

        Directory.CreateDirectory(dir);
    }

    string sqlPackageBacpacFile = Path.Combine(dataDirectory, payload.DatabaseName + ".bacpac");
    string sqlPackageLogPath = payload.DatabaseName + ".log";

    // Batch-provided environment: application package location, task working
    // directory, and the job output container SAS URL.
    // (Unused locals removed: startTime and the AzBatchTaskId read were never used.)
    var targetDir = Environment.GetEnvironmentVariable(Constants.EnvironmentVariableNames.AppPackagePrefix + "_" + payload.ApplicatonPackageName + "#" + payload.ApplicatonPackageVersion);
    var workingDir = Environment.GetEnvironmentVariable(Constants.EnvironmentVariableNames.TaskWorkingDir);
    string jobContainerUrl = Environment.GetEnvironmentVariable(Constants.EnvironmentVariableNames.JobContainerUrl);

    // Build the sqlpackage.exe import/export command line.
    var cmdBuilder = new StringBuilder();
    cmdBuilder.Append($"/Action:{payload.Action}");
    cmdBuilder.Append(" /MaxParallelism:16");
    cmdBuilder.Append($" /DiagnosticsFile:{sqlPackageLogPath}");
    cmdBuilder.Append(" /p:CommandTimeout=604800"); // 7 days, in seconds

    switch (payload.Action)
    {
        case ActionType.Export:
            cmdBuilder.Append($" /SourceServerName:{payload.LogicalServerName}");
            cmdBuilder.Append($" /SourceDatabaseName:{payload.DatabaseName}");
            cmdBuilder.Append($" /AccessToken:{payload.AccessToken}");
            cmdBuilder.Append($" /TargetFile:{sqlPackageBacpacFile}");
            cmdBuilder.Append(" /SourceTimeout:30");
            cmdBuilder.Append($" /p:TempDirectoryForTableData=\"{tempDirectory}\"");
            cmdBuilder.Append(" /p:VerifyFullTextDocumentTypesSupported=false");
            break;

        case ActionType.Import:
            cmdBuilder.Append($" /TargetServerName:{payload.LogicalServerName}");
            cmdBuilder.Append($" /TargetDatabaseName:{payload.DatabaseName}");
            cmdBuilder.Append($" /AccessToken:{payload.AccessToken}");
            cmdBuilder.Append(" /TargetTimeout:30");
            cmdBuilder.Append($" /SourceFile:{sqlPackageBacpacFile}");
            break;

        default:
            throw new ArgumentException($"Invalid action type: {payload.Action}");
    }

    // For an import, the bacpac must first be fetched from the job's output container.
    if (payload.Action == ActionType.Import)
    {
        WriteLine(string.Format("Downloading {0} bacpac file to {1}", payload.DatabaseName, sqlPackageBacpacFile));
        CloudBlobContainer container = new CloudBlobContainer(new Uri(jobContainerUrl));
        CloudBlockBlob blob = container.GetBlockBlobReference(String.Format("$JobOutput/{0}.bacpac", payload.DatabaseName));
        blob.DownloadToFile(sqlPackageBacpacFile, FileMode.CreateNew);

        if (File.Exists(sqlPackageBacpacFile))
        {
            WriteLine(string.Format("Downloaded {0} bacpac file to {1}", payload.DatabaseName, sqlPackageBacpacFile));
        }
        else
        {
            throw new Exception(string.Format("{0} didn't download", sqlPackageBacpacFile));
        }
    }

    // Perform the import/export process, relaying sqlpackage's stdout/stderr.
    int exitCode;
    // BUG FIX: Process is IDisposable and was never disposed.
    using (var process = new Process
    {
        StartInfo = new ProcessStartInfo
        {
            WorkingDirectory = workingDir,
            FileName = Path.Combine(targetDir, "sqlpackage.exe"),
            Arguments = cmdBuilder.ToString(),
            CreateNoWindow = true,
            UseShellExecute = false,
            RedirectStandardOutput = true,
            RedirectStandardError = true
        }
    })
    {
        // A null Data signals end-of-stream; don't echo it as a blank line.
        process.OutputDataReceived += (s, e) => { if (e.Data != null) WriteLine(e.Data); };
        process.ErrorDataReceived += (s, e) => { if (e.Data != null) WriteErrorLine(e.Data); };
        process.Start();
        process.BeginOutputReadLine();
        process.BeginErrorReadLine();
        process.WaitForExit();
        exitCode = process.ExitCode;
    }

    WriteLine(String.Format("SqlPackage.exe exited with code: {0}", exitCode));

    if (payload.Action == ActionType.Export)
    {
        if (File.Exists(sqlPackageBacpacFile))
        {
            // BUG FIX: message previously said "Downloaded", but on the export
            // path sqlpackage.exe produced the file locally.
            WriteLine(string.Format("Exported {0} bacpac file to {1}", payload.DatabaseName, sqlPackageBacpacFile));
        }
        else
        {
            // BUG FIX: was the ungrammatical "didn't downloaded"; the export
            // path creates the file rather than downloading it.
            throw new Exception(string.Format("{0} was not created", sqlPackageBacpacFile));
        }

        // Persist the Job Output (diagnostics log + bacpac) to the job's container.
        JobOutputStorage jobOutputStorage = new JobOutputStorage(new Uri(jobContainerUrl));
        await jobOutputStorage.SaveAsync(JobOutputKind.JobOutput, sqlPackageLogPath);
        WriteLine(String.Format("Uploaded {0} to job account", sqlPackageLogPath));
        await jobOutputStorage.SaveAsync(JobOutputKind.JobOutput, sqlPackageBacpacFile, payload.DatabaseName + ".bacpac");
        WriteLine(String.Format("Uploaded {0} to job account", sqlPackageBacpacFile));
    }

    // We are tracking the disk file to save our standard output, but the node agent may take
    // up to 3 seconds to flush the stdout stream to disk. So give the file a moment to catch up.
    await Task.Delay(stdoutFlushDelay);

    // Cleanup folders so the node is left clean.
    foreach (string dir in directories)
    {
        if (Directory.Exists(dir))
        {
            Directory.Delete(dir, true);
        }
    }

    return exitCode;
}