/// <summary>
/// Fire-and-forget trigger of the Azure Batch pipeline. Never throws: any
/// failure is reported and swallowed so the caller's flow is not interrupted.
/// </summary>
private static void StarteBatchOperations()
{
    try
    {
        BatchOperations.TriggerAureBatch();
    }
    catch (Exception ex)
    {
        // The structured logging call was commented out here, leaving an empty
        // catch that silently swallowed every error. Report to stderr instead.
        // TODO(review): restore LoggingHelper.LogTraceException once available.
        Console.Error.WriteLine($"StarteBatchOperations failed: {ex}");
    }
}
/// <summary>
/// Service-bus-message overload of the batch trigger. Never throws.
/// NOTE(review): <paramref name="message"/> is currently ignored — the
/// message-aware overload call is commented out and the parameterless
/// trigger is invoked instead; confirm whether that is still intended.
/// </summary>
/// <param name="message">Service bus payload (currently unused).</param>
private static void StarteBatchOperations(DataUploadServiceBusProperties message)
{
    try
    {
        //BatchOperations.TriggerAureBatch(message);
        BatchOperations.TriggerAureBatch();
    }
    catch (Exception ex)
    {
        // The structured logging call was commented out here, leaving an empty
        // catch that silently swallowed every error. Report to stderr instead.
        // TODO(review): restore LoggingHelper.LogTraceException once available.
        Console.Error.WriteLine($"StarteBatchOperations failed: {ex}");
    }
}
/// <summary>
/// Waits for all tasks of the current job to finish, downloads their output,
/// merges the P# coverage report (best effort), then performs config-driven
/// cleanup of the job, its containers, and the pool.
/// </summary>
/// <param name="batchJob">Descriptor holding the container ids for this run.</param>
private static async Task MonitorAsync(BatchJob batchJob)
{
    // Clients for the batch account and the backing blob storage.
    var batchOps = new BatchOperations(authConfig.BatchAccountName, authConfig.BatchAccountKey, authConfig.BatchAccountUrl);
    var blobOps = new BlobOperations(authConfig.StorageAccountName, authConfig.StorageAccountKey);

    // Block until every task completes or the configured timeout elapses.
    var taskResult = await batchOps.MonitorTasks(
        jobId: JobId,
        timeout: TimeSpan.FromHours(config.TaskWaitHours));

    var outputDir = Path.GetFullPath(Environment.ExpandEnvironmentVariables(config.OutputFolderPath));
    await blobOps.DownloadOutputFiles(outputDir, batchJob.OutputContainerID);

    try
    {
        var psharpBinDir = Path.GetFullPath(Environment.ExpandEnvironmentVariables(config.PSharpBinariesFolderPath));
        PSharpOperations.MergeOutputCoverageReport(outputDir, psharpBinDir);
    }
    catch (Exception e)
    {
        // Best-effort merge: report the failure and continue with cleanup.
        Console.WriteLine(e.Message);
        Console.WriteLine(e.StackTrace);
    }

    // All tasks are done; cleanup is driven by config flags (the old
    // interactive console prompts were removed in favour of these flags).
    Console.WriteLine();
    if (config.DeleteJobAfterDone)
    {
        await batchOps.DeleteJobAsync(JobId);
    }

    Console.WriteLine();
    if (config.DeleteContainerAfterDone)
    {
        await blobOps.DeleteAllContainers(batchJob);
    }

    if (config.DeletePoolAfterDone)
    {
        await blobOps.DeleteNodeContainer(config.PoolId);
        await batchOps.DeletePoolAsync(config.PoolId);
    }
}
/// <summary>
/// End-to-end driver: provisions the pool if needed, uploads input and
/// job-manager files, creates the job with its tasks, persists the job
/// descriptor, and optionally monitors the run to completion.
/// </summary>
private static async Task MainAsync()
{
    // Service clients for the batch account and its storage account.
    var batchOps = new BatchOperations(authConfig.BatchAccountName, authConfig.BatchAccountKey, authConfig.BatchAccountUrl);
    var blobOps = new BlobOperations(authConfig.StorageAccountName, authConfig.StorageAccountKey);

    // Provision the pool on first use: upload the P# binaries and their
    // dependencies as node resource files, then create the pool.
    if (!(await batchOps.CheckIfPoolExists(config.PoolId)))
    {
        var nodeFiles = await blobOps.UploadNodeFiles(config.PSharpBinariesFolderPath, config.PoolId);
        await batchOps.CreatePoolIfNotExistAsync(
            poolId: config.PoolId,
            resourceFiles: nodeFiles,
            numberOfNodes: config.NumberOfNodesInPool,
            OSFamily: config.NodeOsFamily,
            VirtualMachineSize: config.NodeVirtualMachineSize,
            NodeStartCommand: PSharpBatchTestCommon.Constants.PSharpDefaultNodeStartCommand,
            NodeMaxConcurrentTasks: config.NodeMaxConcurrentTasks);
    }

    // Locate the job-manager executable relative to this assembly.
    string executingDirectory = Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location);
    string jobManagerFilePath = Path.Combine(executingDirectory, @".\PSharpBatchJobManager\PSharpBatchJobManager.exe");

    // A time-stamped job id keeps successive runs distinct.
    string jobTimeStamp = PSharpBatchTestCommon.Constants.GetTimeStamp();
    JobId = config.JobDefaultId + jobTimeStamp;

    // Descriptor that accumulates the container ids created for this run.
    var batchJob = new BatchJob();
    batchJob.PoolID = config.PoolId;
    batchJob.JobID = JobId;

    // Upload the test input files; keep the resource-file map for task creation.
    var inputTupleRes = await blobOps.UploadInputFilesFromTestEntities(config.TestEntities, config.PoolId, JobId);
    var inputFilesDict = inputTupleRes.Item1;
    batchJob.InputContainerIDs = inputTupleRes.Item2;

    // Upload the job-manager binaries.
    var jobTupleRes = await blobOps.UploadJobManagerFiles(jobManagerFilePath, config.PoolId, JobId);
    var jobManagerFiles = jobTupleRes.Item1;
    batchJob.JobManagerContainerID = jobTupleRes.Item2;

    // Output container plus a SAS URL the tasks can write results to.
    batchJob.OutputContainerID = await blobOps.CreateOutputContainer(config.PoolId, JobId);
    var outputContainerSasUrl = blobOps.GetOutputContainerSasUrl(batchJob.OutputContainerID);

    var numberOfTasks = config.TestEntities.Select(t => t.NumberOfTasks()).Sum();

    // Create the job, then add one task per test entity under it.
    await batchOps.CreateJobAsync(
        jobId: JobId,
        poolId: config.PoolId,
        resourceFiles: jobManagerFiles,
        outputContainerSasUrl: outputContainerSasUrl,
        numberOfTasks: numberOfTasks,
        timeoutInHours: config.TaskWaitHours);

    await batchOps.AddTasksFromTestEntities(
        jobId: JobId,
        taskIDPrefix: config.TaskDefaultId,
        inputFilesDict: inputFilesDict,
        TestEntities: config.TestEntities);

    // Persist the job descriptor so cleanup/monitoring can find the containers later.
    var outputFolderPath = Path.GetFullPath(Environment.ExpandEnvironmentVariables(config.OutputFolderPath));
    Directory.CreateDirectory(outputFolderPath);
    var batchJobPath = Path.Combine(outputFolderPath, "batchjob.psbatch");
    batchJob.SaveAsXML(batchJobPath);

    Logger.FlushLogs();

    if (config.MonitorBatch)
    {
        await MonitorAsync(batchJob);
    }
}