/// <summary>
/// Writes this task's result file and persists it (plus tracked stdout) to the
/// task's output storage container.
/// </summary>
/// <param name="number">The element value recorded in the results file.</param>
/// <param name="taskId">The Batch task id, written into the results file.</param>
/// <param name="taskStorage">Output storage for the task's job container.</param>
/// <param name="index">Unused here; kept for caller compatibility.</param>
private static async Task WriteFinalOutput(int number, string taskId, TaskOutputStorage taskStorage, string index)
{
    Console.WriteLine("Write output to task storage from EXE");

    // Track stdout.txt so log output is periodically flushed to the TaskLog blob
    // (every 15 seconds) while the task body runs.
    using (ITrackedSaveOperation stdout = await taskStorage.SaveTrackedAsync(
        TaskOutputKind.TaskLog,
        RootDir("stdout.txt"),
        "stdout.txt",
        TimeSpan.FromSeconds(15)))
    {
        Console.WriteLine("Dump output to file from EXE - start");

        string outputFile = "results.txt";

        using (StreamWriter output = File.CreateText(WorkingDir(outputFile)))
        {
            output.WriteLine($"final task {taskId}");
            output.WriteLine($"element: {number}");
        }

        // Persist the task output to Azure Storage.
        // FIX: the original used Task.WaitAll(...), which blocks a thread-pool thread
        // inside an async method and wraps failures in AggregateException; await the
        // save operation instead.
        await taskStorage.SaveAsync(TaskOutputKind.TaskOutput, outputFile);

        // We are tracking the disk file to save our standard output, but the node agent may take
        // up to 3 seconds to flush the stdout stream to disk. So give the file a moment to catch up.
        await Task.Delay(stdoutFlushDelay);

        Console.WriteLine("Dump output to file from EXE - finish");
    }
}
/// <summary>
/// Verifies that saving a file under an explicit destination name makes it appear
/// in the TaskPreview output listing under that name.
/// </summary>
public async Task IfAFileIsSavedWithAnExplicitPath_ThenItAppearsInTheList()
{
    var taskOutputStorage = new TaskOutputStorage(StorageAccount, _jobId, _taskId);

    await taskOutputStorage.SaveAsync(TaskOutputKind.TaskPreview, FilePath("TestText1.txt"), "RenamedTestText1.txt");

    var blobs = taskOutputStorage.ListOutputs(TaskOutputKind.TaskPreview).ToList();

    // FIX: use Assert.NotEmpty for consistency with the sibling tests — it also
    // produces a clearer failure message than Assert.NotEqual(0, blobs.Count).
    Assert.NotEmpty(blobs);
    Assert.Contains(blobs, b => b.Uri.AbsoluteUri.EndsWith($"{_jobId}/{_taskId}/$TaskPreview/RenamedTestText1.txt"));
}
/// <summary>
/// Verifies that a file saved under a nested (multi-level) destination path shows
/// up in the TaskPreview listing at that full virtual-directory path.
/// </summary>
public async Task IfAFileIsSavedWithAnExplicitMultiLevelPath_ThenItAppearsInTheList()
{
    var storage = new TaskOutputStorage(StorageAccount, _jobId, _taskId);

    await storage.SaveAsync(TaskOutputKind.TaskPreview, FilePath("TestText1.txt"), "File/In/The/Depths/TestText3.txt");

    var listed = storage.ListOutputs(TaskOutputKind.TaskPreview).ToList();

    Assert.NotEmpty(listed);
    var expectedSuffix = $"{_jobId}/{_taskId}/$TaskPreview/File/In/The/Depths/TestText3.txt";
    Assert.Contains(listed, blob => blob.Uri.AbsoluteUri.EndsWith(expectedSuffix));
}
/// <summary>
/// Verifies that a retry policy passed to the storage-account constructor is
/// applied as the default retry policy of the underlying blob service client.
/// </summary>
public async Task IfARetryPolicyIsSpecifiedInTheStorageAccountConstructor_ThenItIsUsed()
{
    var retryPolicy = new LinearRetry(TimeSpan.FromSeconds(5), 4);
    var storage = new TaskOutputStorage(StorageAccount, _jobId, _taskId, retryPolicy);

    await storage.SaveAsync(TaskOutputKind.TaskOutput, FilePath("TestText1.txt"), "SavedWithLinearRetry1.txt");
    var output = await storage.GetOutputAsync(TaskOutputKind.TaskOutput, "SavedWithLinearRetry1.txt");

    // The policy should have flowed through to the blob's service client.
    var serviceClient = output.CloudBlob.ServiceClient;
    Assert.IsType<LinearRetry>(serviceClient.DefaultRequestOptions.RetryPolicy);
}
/// <summary>
/// Verifies that a file saved under a multi-level destination path can be
/// retrieved from that same path, and that its content round-trips unchanged.
/// </summary>
public async Task IfAFileIsSavedWithAMultiLevelPath_ThenItCanBeGot()
{
    const string destinationPath = "This/File/Is/Gettable.txt";
    var storage = new TaskOutputStorage(StorageAccount, _jobId, _taskId);

    await storage.SaveAsync(TaskOutputKind.TaskPreview, FilePath("TestText1.txt"), destinationPath);
    var retrieved = await storage.GetOutputAsync(TaskOutputKind.TaskPreview, destinationPath);

    // Bytes read back from the blob must match the source file exactly.
    var downloaded = await retrieved.ReadAsByteArrayAsync();
    var expected = File.ReadAllBytes(FilePath("TestText1.txt"));
    Assert.Equal(expected, downloaded);
}
/// <summary>
/// Verifies that the public SaveAsync overload infers the current directory
/// when given a relative path.
/// </summary>
public async Task IfAFileIsSaved_UsingThePublicMethod_ThenTheCurrentDirectoryIsInferred()
{
    // To avoid needing to mess with the process working directory, relative path tests
    // normally go through the internal SaveAsyncImpl method. This test verifies that
    // the public SaveAsync method forwards the appropriate directory to SaveAsyncImpl.
    Assert.True(File.Exists(FilePath("TestText1.txt")), "Current directory is not what was expected - cannot verify current directory inference");

    var taskOutputStorage = new TaskOutputStorage(StorageAccount, _jobId, _taskId);

    await taskOutputStorage.SaveAsync(TaskOutputKind.TaskPreview, FilePath("TestText1.txt"));

    var blobs = taskOutputStorage.ListOutputs(TaskOutputKind.TaskPreview).ToList();

    // FIX: use Assert.NotEmpty for consistency with the sibling tests — clearer
    // failure output than Assert.NotEqual(0, blobs.Count).
    Assert.NotEmpty(blobs);
    Assert.Contains(blobs, b => b.Uri.AbsoluteUri.EndsWith($"{_jobId}/{_taskId}/$TaskPreview/TestText1.txt"));
}
/// <summary>
/// Verifies that a retry policy passed to the container-URL constructor is
/// applied as the default retry policy of the underlying blob service client.
/// </summary>
public async Task IfARetryPolicyIsSpecifiedInTheContainerUrlConstructor_ThenItIsUsed()
{
    using (var batchClient = await BatchClient.OpenAsync(new FakeBatchServiceClient()))
    {
        var job = batchClient.JobOperations.CreateJob(_jobId, null);
        var containerUrl = job.GetOutputStorageContainerUrl(StorageAccount, TimeSpan.FromMinutes(2));

        // Build storage from the container SAS URL with an explicit LinearRetry.
        var storage = new TaskOutputStorage(new Uri(containerUrl), _taskId, new LinearRetry(TimeSpan.FromSeconds(5), 4));

        await storage.SaveAsync(TaskOutputKind.TaskOutput, FilePath("TestText1.txt"), "SavedWithLinearRetry2.txt");
        var output = await storage.GetOutputAsync(TaskOutputKind.TaskOutput, "SavedWithLinearRetry2.txt");

        // The policy should have flowed through to the blob's service client.
        var serviceClient = output.CloudBlob.ServiceClient;
        Assert.IsType<LinearRetry>(serviceClient.DefaultRequestOptions.RetryPolicy);
    }
}
/// <summary>
/// Entry point for the video-processing worker: drains the "videoprocess"
/// Service Bus queue, writes a marker blob per message into the "results"
/// container, and (outside development mode) uploads the redirected
/// stdout/stderr logs to Batch task output storage.
/// </summary>
static void Main(string[] args) {
    /*
     * Variables set by environment variables. If the environment variable does not exist an alternate value is defined under
     * videoProcessor/Properties/Settings.settings. Double click that item in Solution Explorer if you want to put in persistent values.
     */
    // Service Bus connection and video storage credentials (getEnvironmentVariable is
    // presumably "env var, else fallback" — defined elsewhere in this project).
    var sbConnection = getEnvironmentVariable("SB_CONNECT", Properties.Settings.Default.SB_CONNECT);
    var videoSAS = getEnvironmentVariable("VIDEO_STORAGE_SAS", Properties.Settings.Default.VIDEO_STORAGE_SAS);
    var videoAccountName = getEnvironmentVariable("VIDEO_STORAGE_ACCOUNT", Properties.Settings.Default.VIDEO_STORAGE_ACCOUNT);
    // Storage + Batch account settings used at the end to upload the log files.
    var batchStorageAccount = getEnvironmentVariable("BATCH_STORAGE_ACCOUNT", Properties.Settings.Default.BATCH_STORAGE_ACCOUNT);
    var batchStorageKey = getEnvironmentVariable("BATCH_STORAGE_KEY", Properties.Settings.Default.BATCH_STORAGE_KEY);
    var batchURI = getEnvironmentVariable("BATCH_URI", Properties.Settings.Default.BATCH_URI);
    var batchName = getEnvironmentVariable("BATCH_ACCOUNT_NAME", Properties.Settings.Default.BATCH_ACCOUNT_NAME);
    var batchKey = getEnvironmentVariable("BATCH_KEY", Properties.Settings.Default.BATCH_KEY);
    // When true: console output is NOT redirected and nothing is uploaded to Batch storage.
    var development = getEnvironmentBoolean("DEVELOPMENT", Properties.Settings.Default.DEVELOPMENT);

    //This data does not come from user defined environment variables.
    var taskOutputFolder = getEnvironmentVariable("AZ_BATCH_TASK_DIR", "d:\\dump");
    var jobId = getEnvironmentVariable("AZ_BATCH_JOB_ID", "DEMOJOB");
    var taskId = getEnvironmentVariable("AZ_BATCH_TASK_ID", "DEMOTASK");
    var taskOutputFile = taskOutputFolder + @"\stdout.log";
    var errOutputFile = taskOutputFolder + @"\stderr.log";
    // On a real Batch node AZ_BATCH_TASK_DIR already exists; only the local
    // development fallback folder may need creating.
    if (development) {
        if (!Directory.Exists(taskOutputFolder)) {
            Directory.CreateDirectory(taskOutputFolder);
        }
    }
    //Redirect console output to the stdOut and stddError files
    using (var taskOutput = new StreamWriter(taskOutputFile)) {
        var output = Console.Out;  // remember the real console writer so it can be restored below
        if (!development) {
            Console.SetOut(taskOutput);
        }
        using (var taskErrors = new StreamWriter(errOutputFile)) {
            var errorOutput = Console.Error;  // remember the real error writer
            if (!development) {
                Console.SetError(taskErrors);
            }
            //wrap the rest of the operation in try/catch and log errors to batch log output
            try {
                //Get Service bus queue. Raise an error if the queue does not exist.
                var queueName = "videoprocess";
                var sbEnv = Microsoft.ServiceBus.NamespaceManager.CreateFromConnectionString(sbConnection);
                if (!sbEnv.QueueExists(queueName)) {
                    throw new ArgumentException("The \"videoprocess\" queue does not exist.");
                }
                var sbClient = QueueClient.CreateFromConnectionString(sbConnection, queueName);
                //Set up the output container in the same storage account as the video
                var resultsContainerName = "results";
                sbClient.PrefetchCount = 5;
                var videoCred = new StorageCredentials(videoSAS);
                string endpoint = $"https://{videoAccountName}.blob.core.windows.net/";
                CloudBlobClient blobClient = new CloudBlobClient(new Uri(endpoint), videoCred);
                var container = blobClient.GetContainerReference(resultsContainerName);
                container.CreateIfNotExists();
                //Process the messages in the videoprocess queue. Create a dummy file in the output container for each message.
                var emptyCount = 0;
                var messageWait = new TimeSpan(0, 0, 2);  // 2-second receive timeout per poll
                var message = sbClient.Receive(messageWait);
                var cleanRun = true;
                //This processes two empty receives. This should get around any timing issues.
                while ((message != null) || (emptyCount < 2)) {
                    if (message != null) {
                        try {
                            var fileName = message.GetBody <string>();
                            Console.WriteLine($"Message: {fileName}");
                            var blob = container.GetBlockBlobReference(fileName);
                            blob.UploadText("File successfully processed");
                            message.Complete();
                        } catch (Exception exPerMessage) {
                            // One bad message should not abort the run: record the failure,
                            // leave cleanRun false, and keep draining the queue.
                            Console.Error.WriteLine($"Error processing message {message.MessageId}: {exPerMessage.Message}");
                            cleanRun = false;
                        }
                    } else {
                        emptyCount++;  // empty receive; loop exits after two of these in a row
                    }
                    message = sbClient.Receive(messageWait);
                }
                if (cleanRun) {
                    Console.WriteLine("Successfully processed all messages");
                } else {
                    Console.WriteLine("There were errors writing the files");
                }
            } catch (Exception ex) {
                Console.Error.WriteLine($"An error occurred processing the messages: {ex.Message}");
            }
            // Restore the real error stream BEFORE flushing/closing the redirected writer.
            if (!development) {
                Console.SetError(errorOutput);
            }
            taskErrors.Flush();
            taskErrors.Close();
        }
        // Likewise restore stdout before the writer is flushed/closed.
        if (!development) {
            Console.SetOut(output);
        }
        taskOutput.Flush();
        taskOutput.Close();
    }
    //Try to write the output to the batch output using the library. If that doesn't work (testing) then write to the console
    if (!development) {
        try {
            var linkedStorageAccount = new CloudStorageAccount(new StorageCredentials(batchStorageAccount, batchStorageKey), true);
            var taskOutputStorage = new TaskOutputStorage(linkedStorageAccount, jobId, taskId);
            //This needs a task because TaskOutputStorage is completely async
            var t = Task.Run(async() => {
                //Set up output storage
                BatchSharedKeyCredentials cred = new BatchSharedKeyCredentials(batchURI, batchName, batchKey);
                using (BatchClient batchClient = BatchClient.Open(cred)) {
                    // Create the blob storage container for the outputs.
                    await batchClient.JobOperations.GetJob(jobId).PrepareOutputStorageAsync(linkedStorageAccount);
                }
                // Upload both log files under their bare file names.
                await taskOutputStorage.SaveAsync(TaskOutputKind.TaskLog, taskOutputFile, Path.GetFileName(taskOutputFile));
                await taskOutputStorage.SaveAsync(TaskOutputKind.TaskLog, errOutputFile, Path.GetFileName(errOutputFile));
            });
            t.Wait();  // Main is synchronous; block until the uploads complete
        } catch (AggregateException ae) {
            // t.Wait() surfaces async failures wrapped in AggregateException; report each inner error.
            var errorText = new StringBuilder();
            foreach (Exception ex in ae.InnerExceptions) {
                errorText.AppendLine(ex.Message);
            }
            Console.Error.Write(errorText.ToString());
            Console.WriteLine("Could not write output to batch storage. Check the standard error output for details. Processing information: \n {0}", File.ReadAllText(taskOutputFile));
        }
    } else {
        Console.WriteLine("Done. Press any key to continue");
        Console.ReadLine();
    }
}
/// <summary>
/// Verifies that SaveAsync rejects a null output kind with an
/// ArgumentNullException naming the "kind" parameter.
/// </summary>
public async Task CannotPassANullKindWhenSaving()
{
    var ex = await Assert.ThrowsAsync<ArgumentNullException>(
        () => _storage.SaveAsync(null, "test.txt"));

    Assert.Equal("kind", ex.ParamName);
}
/// <summary>
/// Runs the Monte Carlo simulation task: streams per-run results to a results
/// file, writes a summary file, and persists both (plus tracked stdout) to the
/// job's output storage container.
/// </summary>
/// <returns>0 on success (the task's exit code).</returns>
public static async Task<int> RunTaskAsync()
{
    // Obtain service-defined environment variables
    string jobId = Environment.GetEnvironmentVariable("AZ_BATCH_JOB_ID");
    string taskId = Environment.GetEnvironmentVariable("AZ_BATCH_TASK_ID");

    // Obtain the custom environment variable we set in the client application
    string jobContainerUrl = Environment.GetEnvironmentVariable("JOB_CONTAINER_URL");

    // The task will use the TaskOutputStorage to store both its output and log updates
    TaskOutputStorage taskStorage = new TaskOutputStorage(new Uri(jobContainerUrl), taskId);

    // The primary task logic is wrapped in a using statement that sends updates to the
    // stdout.txt blob in Storage every 15 seconds while the task code runs.
    using (ITrackedSaveOperation stdout = await taskStorage.SaveTrackedAsync(
        TaskOutputKind.TaskLog,
        RootDir("stdout.txt"),
        "stdout.txt",
        TimeSpan.FromSeconds(15)))
    {
        string outputFile = $"results_{taskId}.txt";
        string summaryFile = $"summary_{taskId}.txt";

        using (StreamWriter output = File.CreateText(WorkingDir(outputFile)))
        using (StreamWriter summary = File.CreateText(WorkingDir(summaryFile)))
        {
            output.WriteLine($"# Task {taskId}");

            const int runCount = 1000000;
            int[] results = new int[runCount];
            double resultTotal = 0;

            for (int i = 0; i < runCount; ++i)
            {
                int runResult = PerformSingleRunMonteCarloSimulation();
                output.WriteLine($"{i}, {runResult}");
                results[i] = runResult;
                resultTotal += runResult;

                // Periodic progress marker on the tracked stdout.
                if (i % 5000 == 0)
                {
                    Console.WriteLine($"{DateTime.UtcNow}: Processing... done {i}");
                }
            }

            double mean = resultTotal / runCount;
            double stddev = Math.Sqrt((from r in results let d = r - mean select d * d).Average());

            summary.WriteLine($"Task: {taskId}");
            summary.WriteLine($"Run count: {runCount}");
            summary.WriteLine($"Mean: {mean}");
            summary.WriteLine($"Std dev: {stddev}");
        }

        // Persist the task output to Azure Storage.
        // FIX: the original used Task.WaitAll, which blocks a thread-pool thread
        // inside an async method and wraps failures in AggregateException;
        // await Task.WhenAll keeps the method fully asynchronous.
        await Task.WhenAll(
            taskStorage.SaveAsync(TaskOutputKind.TaskOutput, outputFile),
            taskStorage.SaveAsync(TaskOutputKind.TaskPreview, summaryFile));

        // We are tracking the disk file to save our standard output, but the node agent may take
        // up to 3 seconds to flush the stdout stream to disk. So give the file a moment to catch up.
        await Task.Delay(stdoutFlushDelay);

        return 0;
    }
}
/// <summary>
/// Runs the Monte Carlo simulation task: streams per-run results to a results
/// file, writes a summary file, and persists both (plus tracked stdout) to the
/// job's output storage container.
/// </summary>
/// <returns>0 on success (the task's exit code).</returns>
public static async Task<int> RunTaskAsync()
{
    // Obtain service-defined environment variables
    string jobId = Environment.GetEnvironmentVariable("AZ_BATCH_JOB_ID");
    string taskId = Environment.GetEnvironmentVariable("AZ_BATCH_TASK_ID");

    // Obtain the custom environment variable we set in the client application
    string jobContainerUrl = Environment.GetEnvironmentVariable("JOB_CONTAINER_URL");

    // The task will use the TaskOutputStorage to store both its output and log updates
    TaskOutputStorage taskStorage = new TaskOutputStorage(new Uri(jobContainerUrl), taskId);

    // The primary task logic is wrapped in a using statement that sends updates to the
    // stdout.txt blob in Storage every 15 seconds while the task code runs.
    using (ITrackedSaveOperation stdout = await taskStorage.SaveTrackedAsync(
        TaskOutputKind.TaskLog,
        RootDir("stdout.txt"),
        "stdout.txt",
        TimeSpan.FromSeconds(15)))
    {
        string outputFile = $"results_{taskId}.txt";
        string summaryFile = $"summary_{taskId}.txt";

        using (StreamWriter output = File.CreateText(WorkingDir(outputFile)))
        using (StreamWriter summary = File.CreateText(WorkingDir(summaryFile)))
        {
            output.WriteLine($"# Task {taskId}");

            const int runCount = 1000000;
            int[] results = new int[runCount];
            double resultTotal = 0;

            for (int i = 0; i < runCount; ++i)
            {
                int runResult = PerformSingleRunMonteCarloSimulation();
                output.WriteLine($"{i}, {runResult}");
                results[i] = runResult;
                resultTotal += runResult;

                // Periodic progress marker on the tracked stdout.
                if (i % 5000 == 0)
                {
                    Console.WriteLine($"{DateTime.UtcNow}: Processing... done {i}");
                }
            }

            double mean = resultTotal / runCount;
            double stddev = Math.Sqrt((from r in results let d = r - mean select d * d).Average());

            summary.WriteLine($"Task: {taskId}");
            summary.WriteLine($"Run count: {runCount}");
            summary.WriteLine($"Mean: {mean}");
            summary.WriteLine($"Std dev: {stddev}");
        }

        // Persist the task output to Azure Storage.
        // FIX: the original used Task.WaitAll, which blocks a thread-pool thread
        // inside an async method and wraps failures in AggregateException;
        // await Task.WhenAll keeps the method fully asynchronous.
        await Task.WhenAll(
            taskStorage.SaveAsync(TaskOutputKind.TaskOutput, outputFile),
            taskStorage.SaveAsync(TaskOutputKind.TaskPreview, summaryFile));

        // We are tracking the disk file to save our standard output, but the node agent may take
        // up to 3 seconds to flush the stdout stream to disk. So give the file a moment to catch up.
        await Task.Delay(stdoutFlushDelay);

        return 0;
    }
}