/// <summary>
/// Verifies that per-category summaries are built correctly and that appending two
/// experiment summaries to a table yields the expected columns and rows.
/// </summary>
public void BuildExperimentsSummary()
{
    var domain = new Z3Domain();

    // First experiment: 3 results in category "a", 2 in category "b".
    var firstResults = BuildResults(1, 3, "a").Concat(BuildResults(1, 2, "b"));
    var firstSummary = ExperimentSummary.Build(firstResults, domain);
    Assert.AreEqual(2, firstSummary.Count, "2 categories");
    AssertCatSummary(3, firstSummary["a"]);
    AssertCatSummary(2, firstSummary["b"]);

    // Append the first experiment's summary to an empty table.
    Table tableWithOne = ExperimentSummaryStorage.AppendOrReplace(
        Table.Empty,
        new ExperimentSummary(1, DateTimeOffset.Now, firstSummary));

    // Second experiment: 3 results in "b", 2 in "c".
    var secondResults = BuildResults(1, 3, "b").Concat(BuildResults(1, 2, "c"));
    var secondSummary = ExperimentSummary.Build(secondResults, domain);
    Table tableWithTwo = ExperimentSummaryStorage.AppendOrReplace(
        tableWithOne,
        new ExperimentSummary(2, DateTimeOffset.Now, secondSummary));

    Assert.IsTrue(tableWithTwo.Count >= 2 + 3 * (5 + 8), "Number of columns");
    Assert.AreEqual(2, tableWithTwo.RowsCount, "Number of rows");

    // One row per experiment; categories absent from an experiment appear as empty cells.
    AreEqualArrays(new[] { "1", "2" }, tableWithTwo["ID"].Rows.AsString.ToArray());
    AreEqualArrays(new[] { "3", "" }, tableWithTwo["a|SAT"].Rows.AsString.ToArray());
    AreEqualArrays(new[] { "2", "3" }, tableWithTwo["b|SAT"].Rows.AsString.ToArray());
    AreEqualArrays(new[] { "", "2" }, tableWithTwo["c|SAT"].Rows.AsString.ToArray());
}
/// <summary>
/// Verifies that updating a RecordsTable with benchmark results populates both
/// the per-category and per-benchmark record entries.
/// </summary>
public void BuildExperimentsRecords()
{
    var domain = new Z3Domain();

    // Experiment 1 produces 3 results in category "a" and 2 in category "b".
    var results = BuildResults(1, 3, "a").Concat(BuildResults(1, 2, "b"));

    // Start from an empty records table and fold in the results.
    var records = new Records.RecordsTable(
        new Dictionary<string, Records.Record>(),
        new Dictionary<string, Records.CategoryRecord>());
    records.UpdateWith(results, domain);

    // Category "a": 3 files with an aggregated runtime of 3.
    Assert.AreEqual(3, records.CategoryRecords["a"].Files);
    Assert.AreEqual(3, records.CategoryRecords["a"].Runtime);

    // One benchmark record per file across both categories.
    Assert.AreEqual(3 + 2, records.BenchmarkRecords.Count);
    Assert.AreEqual(1, records.BenchmarkRecords["a/file0"].ExperimentId);
    Assert.AreEqual(1, records.BenchmarkRecords["a/file0"].Runtime);
    Assert.AreEqual(1, records.BenchmarkRecords["b/file0"].ExperimentId);
    Assert.AreEqual(1, records.BenchmarkRecords["b/file0"].Runtime);

    // Category "b": 2 files with an aggregated runtime of 2.
    Assert.AreEqual(2, records.CategoryRecords["b"].Files);
    Assert.AreEqual(2, records.CategoryRecords["b"].Runtime);
}
/// <summary>
/// Deletes any existing Batch job for the given experiment and resubmits the named
/// benchmarks as a fresh job on the manager's pool.
/// </summary>
/// <param name="id">Identifier of the experiment whose benchmarks are restarted.</param>
/// <param name="benchmarkNames">Benchmarks to run again; the list is passed to the starter task via a temp blob.</param>
/// <param name="newBenchmarkContainerUri">Container to take benchmarks from. May be null only when the
/// experiment uses the default container, in which case the default container URI is substituted.</param>
/// <exception cref="InvalidOperationException">The manager is in read mode and cannot start jobs.</exception>
/// <exception cref="ArgumentException">No container URI given for an experiment using a non-default container.</exception>
public override async Task RestartBenchmarks(int id, IEnumerable<string> benchmarkNames, string newBenchmarkContainerUri = null)
{
    if (!CanStart)
    {
        throw new InvalidOperationException("Cannot start experiment since the manager is in read mode");
    }

    var exp = await storage.GetExperiment(id);
    if (newBenchmarkContainerUri == null)
    {
        if (exp.BenchmarkContainerUri != ExperimentDefinition.DefaultContainerUri)
        {
            throw new ArgumentException("No newBenchmarkContainerUri provided, but experiment uses a non-default container.");
        }
        newBenchmarkContainerUri = ExperimentDefinition.DefaultContainerUri;
    }

    var refExp = await storage.GetReferenceExperiment();
    var poolId = this.BatchPoolID;
    var jobId = BuildJobId(id);

    // The benchmark list is handed to the starter task through a temporary blob.
    string tempBlobName = Guid.NewGuid().ToString();
    await storage.TempBlobContainer.GetBlockBlobReference(tempBlobName).UploadTextAsync(string.Join("\n", benchmarkNames));

    using (var bc = BatchClient.Open(batchCreds))
    {
        // Remove a previous job with the same id, if any.
        try
        {
            await bc.JobOperations.DeleteJobAsync(jobId);
        }
        catch (BatchException batchExc) when (batchExc.RequestInformation != null &&
                                              batchExc.RequestInformation.HttpStatusCode.HasValue &&
                                              batchExc.RequestInformation.HttpStatusCode == System.Net.HttpStatusCode.NotFound)
        {
            // Not found - nothing to delete.
        }

        CloudJob job = bc.JobOperations.CreateJob();
        job.Id = jobId;
        job.OnAllTasksComplete = OnAllTasksComplete.TerminateJob;
        job.PoolInformation = new PoolInformation { PoolId = poolId };
        // The preparation task mirrors the task's working directory into the node's shared directory.
        job.JobPreparationTask = new JobPreparationTask
        {
            CommandLine = "cmd /c (robocopy %AZ_BATCH_TASK_WORKING_DIR% %AZ_BATCH_NODE_SHARED_DIR%\\%AZ_BATCH_JOB_ID% /e /purge) ^& IF %ERRORLEVEL% LEQ 1 exit 0",
            ResourceFiles = new List<ResourceFile>(),
            WaitForSuccess = true
        };

        // Read-only SAS for the worker blobs, valid long enough for the job to run.
        SharedAccessBlobPolicy sasConstraints = new SharedAccessBlobPolicy
        {
            SharedAccessExpiryTime = DateTime.UtcNow.AddHours(48),
            Permissions = SharedAccessBlobPermissions.Read
        };
        foreach (CloudBlockBlob blob in storage.ListAzureWorkerBlobs())
        {
            string sasBlobToken = blob.GetSharedAccessSignature(sasConstraints);
            string blobSasUri = String.Format("{0}{1}", blob.Uri, sasBlobToken);
            job.JobPreparationTask.ResourceFiles.Add(new ResourceFile(blobSasUri, blob.Name));
        }

        string executableFolder = "exec";
        job.JobPreparationTask.ResourceFiles.Add(new ResourceFile(storage.GetExecutableSasUri(exp.Executable), Path.Combine(executableFolder, exp.Executable)));

        if (refExp != null)
        {
            // Ship the reference experiment's executable and its benchmark data alongside the job.
            string refContentFolder = "refdata";
            string refBenchFolder = Path.Combine(refContentFolder, "data");
            var refExpExecUri = storage.GetExecutableSasUri(refExp.Definition.Executable);
            job.JobPreparationTask.ResourceFiles.Add(new ResourceFile(refExpExecUri, Path.Combine(refContentFolder, refExp.Definition.Executable)));

            AzureBenchmarkStorage benchStorage;
            if (refExp.Definition.BenchmarkContainerUri == ExperimentDefinition.DefaultContainerUri)
            {
                benchStorage = storage.DefaultBenchmarkStorage;
            }
            else
            {
                benchStorage = new AzureBenchmarkStorage(refExp.Definition.BenchmarkContainerUri);
            }

            Domain refdomain;
            if (refExp.Definition.DomainName == "Z3")
            {
                refdomain = new Z3Domain();
            }
            else
            {
                throw new InvalidOperationException("Reference experiment uses unknown domain.");
            }

            // Accepted benchmark file extensions: the experiment's own '|'-separated list,
            // or the domain defaults when none is given.
            SortedSet<string> extensions;
            if (string.IsNullOrEmpty(refExp.Definition.BenchmarkFileExtension))
            {
                extensions = new SortedSet<string>(refdomain.BenchmarkExtensions.Distinct());
            }
            else
            {
                extensions = new SortedSet<string>(refExp.Definition.BenchmarkFileExtension.Split('|').Select(s => s.Trim().TrimStart('.')).Distinct());
            }

            foreach (CloudBlockBlob blob in benchStorage.ListBlobs(refExp.Definition.BenchmarkDirectory, refExp.Definition.Category))
            {
                string[] parts = blob.Name.Split('/');
                string shortName = parts[parts.Length - 1];
                var shortnameParts = shortName.Split('.');
                // Skip blobs whose extension is not accepted (an empty entry admits extensionless files).
                if (shortnameParts.Length == 1 && !extensions.Contains(""))
                {
                    continue;
                }
                var ext = shortnameParts[shortnameParts.Length - 1];
                if (!extensions.Contains(ext))
                {
                    continue;
                }
                job.JobPreparationTask.ResourceFiles.Add(new ResourceFile(benchStorage.GetBlobSASUri(blob), Path.Combine(refBenchFolder, shortName)));
            }
        }

        job.Constraints = new JobConstraints();
        job.Constraints.MaxTaskRetryCount = MaxTaskRetryCount;
        AddStarterTask(id, tempBlobName, job, true, newBenchmarkContainerUri);

        // Commit with exponential back-off: a Conflict can occur while the old job with the
        // same id is still being deleted by the service.
        const int maxCommitRetries = 7; // arbitrarily picked constant
        bool failedToCommit = false;
        int tryBackAwayMultiplier = 1;
        int tryNo = 0;
        do
        {
            try
            {
                failedToCommit = false;
                await job.CommitAsync();
            }
            catch (BatchException batchExc) when (batchExc.RequestInformation != null &&
                                                  batchExc.RequestInformation.HttpStatusCode.HasValue &&
                                                  batchExc.RequestInformation.HttpStatusCode == System.Net.HttpStatusCode.Conflict)
            {
                if (tryNo == maxCommitRetries)
                {
                    throw;
                }
                ++tryNo;
                failedToCommit = true;
                // Fixed: was Task.Run(() => Thread.Sleep(...)), which blocked a thread-pool thread.
                await Task.Delay(tryBackAwayMultiplier * 500);
                tryBackAwayMultiplier *= 2;
            }
        } while (failedToCommit);
    }
}
/// <summary>
/// Resubmits experiment <paramref name="id"/> under a new Batch job ("&lt;jobid&gt;-&lt;n&gt;" for the
/// first free n). The pool is recovered from the experiment's recorded worker information
/// ("... (pool: NAME)"), falling back to "z3-nightly".
/// </summary>
/// <param name="id">Identifier of the experiment to reinforce.</param>
/// <param name="def">Experiment definition supplying the executable and timeout.</param>
/// <returns>Always true once the job has been prepared and the starter task added.</returns>
/// <exception cref="InvalidOperationException">The manager is in read mode and cannot start jobs.</exception>
public async Task<bool> Reinforce(int id, ExperimentDefinition def)
{
    if (!CanStart)
    {
        throw new InvalidOperationException("Cannot start experiment since the manager is in read mode");
    }

    var refExp = await storage.GetReferenceExperiment();
    ExperimentEntity ee = await storage.GetExperiment(id);

    // Recover the pool name from the stored worker information, e.g. "STANDARD_D2 (pool: mypool)".
    string poolId = "z3-nightly";
    Regex re = new Regex(@"^.*\(pool: ([^ ]*)\)$");
    Match m = re.Match(ee.WorkerInformation);
    if (m.Success)
    {
        poolId = m.Groups[1].Value;
    }

    using (var bc = BatchClient.Open(batchCreds))
    {
        // Validates that the pool exists before creating the job; throws otherwise.
        await bc.PoolOperations.GetPoolAsync(poolId);

        CloudJob job = bc.JobOperations.CreateJob();

        // Probe for a free job id of the form "<prefix>-<n>"; GetJobAsync throws when the id is unused.
        // Fixed: was the blocking GetJob(...) inside an async method.
        // NOTE(review): any BatchException (not only NotFound) is treated as "id available" - confirm intended.
        string jid_prefix = BuildJobId(id);
        string jid = "";
        bool have_jid = false;
        int cnt = 1;
        while (!have_jid)
        {
            try
            {
                jid = String.Format("{0}-{1}", jid_prefix, cnt++);
                await bc.JobOperations.GetJobAsync(jid);
            }
            catch (BatchException)
            {
                have_jid = true;
            }
        }

        job.Id = jid;
        job.OnAllTasksComplete = OnAllTasksComplete.TerminateJob;
        job.PoolInformation = new PoolInformation { PoolId = poolId };
        // The preparation task mirrors the task's working directory into the node's shared directory.
        job.JobPreparationTask = new JobPreparationTask
        {
            CommandLine = "cmd /c (robocopy %AZ_BATCH_TASK_WORKING_DIR% %AZ_BATCH_NODE_SHARED_DIR%\\%AZ_BATCH_JOB_ID% /e /purge) ^& IF %ERRORLEVEL% LEQ 1 exit 0",
            ResourceFiles = new List<ResourceFile>(),
            WaitForSuccess = true
        };

        // Read-only SAS for the worker blobs, valid long enough for the job to run.
        SharedAccessBlobPolicy sasConstraints = new SharedAccessBlobPolicy
        {
            SharedAccessExpiryTime = DateTime.UtcNow.AddHours(48),
            Permissions = SharedAccessBlobPermissions.Read
        };
        foreach (CloudBlockBlob blob in storage.ListAzureWorkerBlobs())
        {
            string sasBlobToken = blob.GetSharedAccessSignature(sasConstraints);
            string blobSasUri = String.Format("{0}{1}", blob.Uri, sasBlobToken);
            job.JobPreparationTask.ResourceFiles.Add(new ResourceFile(blobSasUri, blob.Name));
        }

        string executableFolder = "exec";
        job.JobPreparationTask.ResourceFiles.Add(new ResourceFile(storage.GetExecutableSasUri(def.Executable), Path.Combine(executableFolder, def.Executable)));

        // Fixed: was "AzureBenchmarkStorage benchStorage = benchStorage = storage.DefaultBenchmarkStorage;"
        // (duplicated assignment typo).
        AzureBenchmarkStorage benchStorage = storage.DefaultBenchmarkStorage;
        if (refExp != null)
        {
            // Ship the reference experiment's executable and its benchmark data alongside the job.
            string refContentFolder = "refdata";
            string refBenchFolder = Path.Combine(refContentFolder, "data");
            var refExpExecUri = storage.GetExecutableSasUri(refExp.Definition.Executable);
            job.JobPreparationTask.ResourceFiles.Add(new ResourceFile(refExpExecUri, Path.Combine(refContentFolder, refExp.Definition.Executable)));

            if (refExp.Definition.BenchmarkContainerUri != ExperimentDefinition.DefaultContainerUri)
            {
                benchStorage = new AzureBenchmarkStorage(refExp.Definition.BenchmarkContainerUri);
            }

            Domain refdomain;
            if (refExp.Definition.DomainName == "Z3")
            {
                refdomain = new Z3Domain();
            }
            else
            {
                throw new InvalidOperationException("Reference experiment uses unknown domain.");
            }

            // Accepted benchmark file extensions: the experiment's own '|'-separated list,
            // or the domain defaults when none is given.
            SortedSet<string> extensions;
            if (string.IsNullOrEmpty(refExp.Definition.BenchmarkFileExtension))
            {
                extensions = new SortedSet<string>(refdomain.BenchmarkExtensions.Distinct());
            }
            else
            {
                extensions = new SortedSet<string>(refExp.Definition.BenchmarkFileExtension.Split('|').Select(s => s.Trim().TrimStart('.')).Distinct());
            }

            foreach (CloudBlockBlob blob in benchStorage.ListBlobs(refExp.Definition.BenchmarkDirectory, refExp.Definition.Category))
            {
                string[] parts = blob.Name.Split('/');
                string shortName = parts[parts.Length - 1];
                var shortnameParts = shortName.Split('.');
                // Skip blobs whose extension is not accepted (an empty entry admits extensionless files).
                if (shortnameParts.Length == 1 && !extensions.Contains(""))
                {
                    continue;
                }
                var ext = shortnameParts[shortnameParts.Length - 1];
                if (!extensions.Contains(ext))
                {
                    continue;
                }
                job.JobPreparationTask.ResourceFiles.Add(new ResourceFile(benchStorage.GetBlobSASUri(blob), Path.Combine(refBenchFolder, shortName)));
            }
        }

        job.Constraints = new JobConstraints();
        if (def.ExperimentTimeout != TimeSpan.Zero)
        {
            job.Constraints.MaxWallClockTime = def.ExperimentTimeout;
        }
        job.Constraints.MaxTaskRetryCount = MaxTaskRetryCount;

        // Nightly runs get the "Z3Nightly" summary; any other creator gets none.
        string summaryName = ee.Creator != "Nightly" ? "" : "Z3Nightly";
        AddStarterTask(id, summaryName, job, false, benchStorage.GetContainerSASUri());
        // NOTE(review): unlike RestartBenchmarks, the job is never committed here - confirm
        // that AddStarterTask (or a caller) performs the commit.
    }
    return true;
}
/// <summary>
/// Registers a new experiment in storage and submits it to Azure Batch: builds the job,
/// attaches SAS-signed worker blobs, the experiment executable, and (when present) the
/// reference experiment's executable and benchmark data, then adds the starter task.
/// </summary>
/// <param name="definition">Experiment definition (executable, timeout, benchmark container, ...).</param>
/// <param name="creator">Optional creator name recorded with the experiment.</param>
/// <param name="note">Optional note recorded with the experiment.</param>
/// <param name="summaryName">Optional summary name passed to the starter task.</param>
/// <returns>The identifier assigned to the newly added experiment.</returns>
/// <exception cref="InvalidOperationException">The manager is in read mode and cannot start jobs.</exception>
public override async Task<ExperimentID> StartExperiment(ExperimentDefinition definition, string creator = null, string note = null, string summaryName = null)
{
    if (!CanStart)
    {
        throw new InvalidOperationException("Cannot start experiment since the manager is in read mode");
    }

    var referenceExperiment = await storage.GetReferenceExperiment();
    var poolId = this.BatchPoolID;
    int id;

    using (var batchClient = BatchClient.Open(batchCreds))
    {
        var pool = await batchClient.PoolOperations.GetPoolAsync(poolId);

        // Record the experiment first so the job id can embed the new experiment id.
        id = await storage.AddExperiment(definition, DateTime.Now, creator, note, string.Format("{0} (pool: {1})", pool.VirtualMachineSize, poolId));

        CloudJob job = batchClient.JobOperations.CreateJob();
        job.Id = BuildJobId(id);
        job.OnAllTasksComplete = OnAllTasksComplete.TerminateJob;
        job.PoolInformation = new PoolInformation { PoolId = poolId };
        // The preparation task mirrors the task's working directory into the node's shared directory.
        job.JobPreparationTask = new JobPreparationTask
        {
            CommandLine = "cmd /c (robocopy %AZ_BATCH_TASK_WORKING_DIR% %AZ_BATCH_NODE_SHARED_DIR%\\%AZ_BATCH_JOB_ID% /e /purge) ^& IF %ERRORLEVEL% LEQ 1 exit 0",
            ResourceFiles = new List<ResourceFile>(),
            WaitForSuccess = true
        };

        // Read-only SAS for the worker blobs, valid long enough for the job to run.
        var readOnlyPolicy = new SharedAccessBlobPolicy
        {
            SharedAccessExpiryTime = DateTime.UtcNow.AddHours(48),
            Permissions = SharedAccessBlobPermissions.Read
        };
        foreach (CloudBlockBlob workerBlob in storage.ListAzureWorkerBlobs())
        {
            string sasToken = workerBlob.GetSharedAccessSignature(readOnlyPolicy);
            string uriWithSas = String.Format("{0}{1}", workerBlob.Uri, sasToken);
            job.JobPreparationTask.ResourceFiles.Add(new ResourceFile(uriWithSas, workerBlob.Name));
        }

        string executableFolder = "exec";
        job.JobPreparationTask.ResourceFiles.Add(new ResourceFile(storage.GetExecutableSasUri(definition.Executable), Path.Combine(executableFolder, definition.Executable)));

        AzureBenchmarkStorage benchmarkStorage = storage.DefaultBenchmarkStorage;
        if (referenceExperiment != null)
        {
            // Ship the reference experiment's executable and its benchmark data alongside the job.
            string referenceFolder = "refdata";
            string referenceDataFolder = Path.Combine(referenceFolder, "data");
            var referenceExecUri = storage.GetExecutableSasUri(referenceExperiment.Definition.Executable);
            job.JobPreparationTask.ResourceFiles.Add(new ResourceFile(referenceExecUri, Path.Combine(referenceFolder, referenceExperiment.Definition.Executable)));

            if (referenceExperiment.Definition.BenchmarkContainerUri != ExperimentDefinition.DefaultContainerUri)
            {
                benchmarkStorage = new AzureBenchmarkStorage(referenceExperiment.Definition.BenchmarkContainerUri);
            }

            Domain referenceDomain;
            if (referenceExperiment.Definition.DomainName == "Z3")
            {
                referenceDomain = new Z3Domain();
            }
            else
            {
                throw new InvalidOperationException("Reference experiment uses unknown domain.");
            }

            // Accepted benchmark file extensions: the experiment's own '|'-separated list,
            // or the domain defaults when none is given.
            SortedSet<string> allowedExtensions = string.IsNullOrEmpty(referenceExperiment.Definition.BenchmarkFileExtension)
                ? new SortedSet<string>(referenceDomain.BenchmarkExtensions.Distinct())
                : new SortedSet<string>(referenceExperiment.Definition.BenchmarkFileExtension.Split('|').Select(s => s.Trim().TrimStart('.')).Distinct());

            foreach (CloudBlockBlob benchmarkBlob in benchmarkStorage.ListBlobs(referenceExperiment.Definition.BenchmarkDirectory, referenceExperiment.Definition.Category))
            {
                string[] pathSegments = benchmarkBlob.Name.Split('/');
                string fileName = pathSegments[pathSegments.Length - 1];
                string[] nameSegments = fileName.Split('.');
                // A file is accepted when its last '.'-separated segment is an allowed extension;
                // extensionless files are admitted only if "" is in the allowed set.
                bool accepted = (nameSegments.Length > 1 || allowedExtensions.Contains(""))
                    && allowedExtensions.Contains(nameSegments[nameSegments.Length - 1]);
                if (accepted)
                {
                    job.JobPreparationTask.ResourceFiles.Add(new ResourceFile(benchmarkStorage.GetBlobSASUri(benchmarkBlob), Path.Combine(referenceDataFolder, fileName)));
                }
            }
        }

        var constraints = new JobConstraints { MaxTaskRetryCount = MaxTaskRetryCount };
        if (definition.ExperimentTimeout != TimeSpan.Zero)
        {
            constraints.MaxWallClockTime = definition.ExperimentTimeout;
        }
        job.Constraints = constraints;

        AddStarterTask(id, summaryName, job, false, benchmarkStorage.GetContainerSASUri());
    }
    return id;
}