/// <summary>
/// Builds an <see cref="AzureBenchmarkResult"/> whose measurement fields mirror the
/// given <see cref="BenchmarkResult"/>. Output fields (StdOut/StdErr and their external
/// storage indices) are initialized to empty; callers fill those in separately.
/// </summary>
/// <param name="b">Source result; must not be null.</param>
/// <returns>A new Azure-side result carrying the same measurements.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="b"/> is null.</exception>
public static AzureBenchmarkResult ToAzureBenchmarkResult(BenchmarkResult b)
{
    if (b == null)
    {
        throw new ArgumentNullException(nameof(b));
    }

    // Copy the property bag into a fresh dictionary so the two results never share state.
    var propertyCopy = new Dictionary<string, string>();
    foreach (var pair in b.Properties)
    {
        propertyCopy.Add(pair.Key, pair.Value);
    }

    return new AzureBenchmarkResult
    {
        AcquireTime = b.AcquireTime,
        BenchmarkFileName = b.BenchmarkFileName,
        ExitCode = b.ExitCode,
        ExperimentID = b.ExperimentID,
        NormalizedCPUTime = b.NormalizedCPUTime,
        CPUTime = b.CPUTime,
        WallClockTime = b.WallClockTime,
        PeakMemorySizeMB = b.PeakMemorySizeMB,
        Properties = propertyCopy,
        Status = b.Status,
        // Output is deliberately not carried over here.
        StdOut = string.Empty,
        StdOutExtStorageIdx = string.Empty,
        StdErr = string.Empty,
        StdErrExtStorageIdx = string.Empty,
    };
}
/// <summary>
/// Tries to serve experiment results from the local temp-directory cache.
/// Returns null when the cache file is missing, stale relative to the blob's
/// LastModified timestamp, or unreadable for any reason.
/// </summary>
/// <param name="id">Experiment whose results are requested.</param>
/// <param name="blob">Cloud blob holding the authoritative results (used for staleness check).</param>
/// <param name="f">Optional benchmark filter forwarded to the loader.</param>
protected AzureBenchmarkResult[] GetFromCache(ExperimentID id, CloudBlob blob, ExperimentManager.BenchmarkFilter f = null)
{
    DateTime before = DateTime.Now;
    AzureBenchmarkResult[] res = null;
    try
    {
        blob.FetchAttributes(); // refresh blob metadata so LastModified is current
        string cacheDir = Path.Combine(Path.GetTempPath(), "z3nightly-results");
        Directory.CreateDirectory(cacheDir);
        string cacheFile = Path.Combine(cacheDir, GetResultsFileName(id));

        // The cache counts as fresh only if the local file is strictly newer than the blob.
        bool cacheIsFresh =
            File.Exists(cacheFile) &&
            blob.Properties.LastModified.HasValue &&
            File.GetLastWriteTimeUtc(cacheFile) > blob.Properties.LastModified.Value;

        if (cacheIsFresh)
        {
            using (var stream = new FileStream(cacheFile, FileMode.Open))
            {
                res = AzureBenchmarkResult.LoadBenchmarks(id, stream, f);
                Debug.Print("Job #{0}: cache hit, load time: {1:n2} sec", id, (DateTime.Now - before).TotalSeconds);
            }
        }
    }
    catch (Exception ex)
    {
        // The cache is best-effort: on any failure fall through and let the caller re-download.
        Debug.Print("Exception caught while reading from cache: " + ex.Message);
        Debug.Print("Stack Trace: " + ex.StackTrace);
    }
    return res;
}
/// <summary>
/// Converts a result to its Azure representation, taking the output either from the
/// already-externalized record (when available) or inline from the result's streams.
/// </summary>
/// <param name="b">Result whose measurements are copied.</param>
/// <param name="externalOutput">Existing Azure record holding externalized output, or null.</param>
private static AzureBenchmarkResult ToAzureResult(BenchmarkResult b, AzureBenchmarkResult externalOutput)
{
    var azureResult = AzureExperimentStorage.ToAzureBenchmarkResult(b);
    if (externalOutput == null)
    {
        // No external record: serialize both output streams inline (rewinding first).
        b.StdOut.Position = 0;
        azureResult.StdOut = Utils.StreamToString(b.StdOut, true);
        azureResult.StdOutExtStorageIdx = string.Empty;
        b.StdErr.Position = 0;
        azureResult.StdErr = Utils.StreamToString(b.StdErr, true);
        azureResult.StdErrExtStorageIdx = string.Empty;
    }
    else
    {
        // Output already lives in external blob storage: carry the references over as-is.
        azureResult.StdOut = externalOutput.StdOut;
        azureResult.StdOutExtStorageIdx = externalOutput.StdOutExtStorageIdx;
        azureResult.StdErr = externalOutput.StdErr;
        azureResult.StdErrExtStorageIdx = externalOutput.StdErrExtStorageIdx;
    }
    return azureResult;
}
/// <summary>
/// Downloads the zipped results table for the given experiment and deserializes it.
/// </summary>
/// <param name="experimentId">Experiment whose results blob is fetched.</param>
/// <returns>Tuple of the parsed results and the blob's ETag; an empty array with a
/// null ETag when the results blob does not exist (HTTP 404).</returns>
/// <remarks>The download uses exponential retry (100 ms base delay, 10 attempts).
/// The zip is expected to contain a single entry named by GetResultsFileName;
/// NOTE(review): if that entry is absent, GetEntry returns null and entry.Open()
/// throws a NullReferenceException — consider an explicit check.</remarks>
public async Task <Tuple <AzureBenchmarkResult[], string> > GetAzureExperimentResults(ExperimentID experimentId) { AzureBenchmarkResult[] results; string blobName = GetResultBlobName(experimentId); var blob = resultsContainer.GetBlobReference(blobName); try { using (MemoryStream zipStream = new MemoryStream(4 << 20)) { await blob.DownloadToStreamAsync(zipStream, AccessCondition.GenerateEmptyCondition(), new Microsoft.WindowsAzure.Storage.Blob.BlobRequestOptions { RetryPolicy = new Microsoft.WindowsAzure.Storage.RetryPolicies.ExponentialRetry(TimeSpan.FromMilliseconds(100), 10) }, null); zipStream.Position = 0; using (ZipArchive zip = new ZipArchive(zipStream, ZipArchiveMode.Read)) { var entry = zip.GetEntry(GetResultsFileName(experimentId)); using (var tableStream = entry.Open()) { results = AzureBenchmarkResult.LoadBenchmarks(experimentId, tableStream); return(Tuple.Create(results, blob.Properties.ETag)); } } } } catch (StorageException ex) when(ex.RequestInformation.HttpStatusCode == 404) // Not found == no results { return(Tuple.Create(new AzureBenchmarkResult[0], (string)null)); } }
/// <summary>
/// Deletes the externally stored stdout/stderr blobs (including snapshots) that belong
/// to the given result. Missing blobs are ignored (DeleteIfExists).
/// </summary>
/// <param name="azureResult">Result whose external output blobs should be removed.</param>
public async Task DeleteOutputs(AzureBenchmarkResult azureResult)
{
    var stdoutBlobName = BlobNameForStdOut(azureResult.ExperimentID, azureResult.BenchmarkFileName, azureResult.StdOutExtStorageIdx);
    var stdoutBlob = outputContainer.GetBlockBlobReference(stdoutBlobName);
    // BUG FIX: the stderr blob name was previously built with BlobNameForStdOut, which
    // targeted the wrong blob — uploads (PrepareBenchmarkResult) and reads
    // (ParseAzureBenchmarkResult) both use BlobNameForStdErr for stderr.
    var stderrBlobName = BlobNameForStdErr(azureResult.ExperimentID, azureResult.BenchmarkFileName, azureResult.StdErrExtStorageIdx);
    var stderrBlob = outputContainer.GetBlockBlobReference(stderrBlobName);
    await Task.WhenAll(
        stdoutBlob.DeleteIfExistsAsync(DeleteSnapshotsOption.IncludeSnapshots, AccessCondition.GenerateEmptyCondition(), new BlobRequestOptions { RetryPolicy = retryPolicy }, null),
        stderrBlob.DeleteIfExistsAsync(DeleteSnapshotsOption.IncludeSnapshots, AccessCondition.GenerateEmptyCondition(), new BlobRequestOptions { RetryPolicy = retryPolicy }, null));
}
/// <summary>
/// Puts the benchmark results of the given experiment to the storage.
/// </summary>
/// <param name="expId">Experiment the results belong to.</param>
/// <param name="results">All results must have same experiment id.</param>
/// <param name="mode">Upload mode controlling create/replace semantics.</param>
/// <param name="etag">Optional ETag precondition for the upload.</param>
/// <returns>Blob etag, if results have been uploaded.
/// Null, if the precondition failed and nothing was uploaded.</returns>
public async Task <string> PutAzureExperimentResults(int expId, AzureBenchmarkResult[] results, UploadBlobMode mode, string etag = null)
{
    string fileName = GetResultsFileName(expId);
    using (MemoryStream zipStream = new MemoryStream())
    {
        // leaveOpen: true so zipStream survives the archive's Dispose and can be
        // rewound and uploaded after the zip directory has been finalized.
        using (var zip = new ZipArchive(zipStream, ZipArchiveMode.Create, true))
        {
            var entry = zip.CreateEntry(fileName);
            // FIX: dispose the entry stream explicitly so the entry's data is flushed
            // deterministically rather than relying on the archive's Dispose to do it.
            using (var entryStream = entry.Open())
            {
                AzureBenchmarkResult.SaveBenchmarks(results, entryStream);
            }
        }
        zipStream.Position = 0;
        return await UploadBlobAsync(resultsContainer, GetResultBlobName(expId), zipStream, mode, etag);
    }
}
/// <summary>
/// Downloads the zipped results table from blob storage, parses it, and opportunistically
/// refreshes the local temp-file cache with the raw CSV payload.
/// </summary>
/// <param name="id">Experiment whose results are requested.</param>
/// <param name="blob">Blob holding the zipped results table.</param>
/// <param name="f">Optional benchmark filter forwarded to the loader.</param>
/// <returns>The parsed results.</returns>
protected async Task <AzureBenchmarkResult[]> GetFromStorage(ExperimentID id, CloudBlob blob, ExperimentManager.BenchmarkFilter f = null)
{
    using (MemoryStream zipStream = new MemoryStream(1 << 16))
    {
        DateTime before = DateTime.Now;
        await blob.DownloadToStreamAsync(zipStream, AccessCondition.GenerateEmptyCondition(),
            new Microsoft.WindowsAzure.Storage.Blob.BlobRequestOptions
            {
                RetryPolicy = new Microsoft.WindowsAzure.Storage.RetryPolicies.ExponentialRetry(TimeSpan.FromMilliseconds(100), 25)
            }, null);
        AzureBenchmarkResult[] res = null;
        zipStream.Position = 0;
        using (ZipArchive zip = new ZipArchive(zipStream, ZipArchiveMode.Read))
        {
            string rfn = GetResultsFileName(id);
            var entry = zip.GetEntry(rfn);
            res = AzureBenchmarkResult.LoadBenchmarks(id, entry.Open(), f);
            DateTime before_cache = DateTime.Now;
            try
            {
                // If possible, save to cache.
                string dir = Path.Combine(Path.GetTempPath(), "z3nightly-results");
                Directory.CreateDirectory(dir);
                string filename = Path.Combine(dir, rfn);
                // BUG FIX: FileMode.OpenOrCreate does not truncate an existing file, so a
                // shorter new payload would leave stale bytes at the tail of the cached
                // file and corrupt later cache reads. FileMode.Create truncates first.
                using (FileStream file = File.Open(filename, FileMode.Create, FileAccess.Write))
                using (var e = entry.Open())
                    await e.CopyToAsync(file);
                // Stamp with the pre-download time so the freshness comparison against
                // the blob's LastModified stays conservative.
                File.SetLastWriteTimeUtc(filename, before);
            }
            catch (Exception ex)
            {
                // Caching is best-effort; never let it fail the download path.
                Debug.Print("Exception caught while saving to cache: {0}", ex.Message);
                Debug.Print("Stack Trace: " + ex.StackTrace);
            }
            Debug.Print("Job #{0}: cache save time: {1:n2} sec", id, (DateTime.Now - before_cache).TotalSeconds);
        }
        return res;
    }
}
/// <summary>
/// Materializes a <see cref="BenchmarkResult"/> from its Azure representation.
/// Inline output text becomes an in-memory stream; externally stored output (non-empty
/// ExtStorageIdx) becomes a lazy blob-backed stream.
/// </summary>
/// <param name="azureResult">Azure-side record to convert.</param>
public BenchmarkResult ParseAzureBenchmarkResult(AzureBenchmarkResult azureResult)
{
    // Resolve each output to a stream before constructing the result.
    Stream stdout = string.IsNullOrEmpty(azureResult.StdOutExtStorageIdx)
        ? (Stream)Utils.StringToStream(azureResult.StdOut)
        : new LazyBlobStream(outputContainer.GetBlobReference(BlobNameForStdOut(azureResult.ExperimentID, azureResult.BenchmarkFileName, azureResult.StdOutExtStorageIdx)));
    Stream stderr = string.IsNullOrEmpty(azureResult.StdErrExtStorageIdx)
        ? (Stream)Utils.StringToStream(azureResult.StdErr)
        : new LazyBlobStream(outputContainer.GetBlobReference(BlobNameForStdErr(azureResult.ExperimentID, azureResult.BenchmarkFileName, azureResult.StdErrExtStorageIdx)));

    return new BenchmarkResult(
        azureResult.ExperimentID,
        azureResult.BenchmarkFileName,
        azureResult.AcquireTime,
        azureResult.NormalizedCPUTime,
        azureResult.CPUTime,
        azureResult.WallClockTime,
        azureResult.PeakMemorySizeMB,
        azureResult.Status,
        azureResult.ExitCode,
        stdout,
        stderr,
        new ReadOnlyDictionary <string, string>(azureResult.Properties));
}
/// <summary>
/// Converts a result to its Azure representation, storing each output stream either
/// inline (small outputs) or as a dedicated blob (outputs over the size limit).
/// </summary>
/// <param name="result">Result to convert; its StdOut/StdErr streams are read.</param>
/// <param name="outputContainer">Container that receives externalized output blobs.</param>
/// <returns>The prepared Azure-side record.</returns>
public static async Task <AzureBenchmarkResult> PrepareBenchmarkResult(BenchmarkResult result, CloudBlobContainer outputContainer)
{
    AzureBenchmarkResult azureResult = ToAzureBenchmarkResult(result);

    // stdout and stderr follow the identical externalize-or-inline policy; the shared
    // logic lives in PrepareOutputAsync (previously duplicated for both streams).
    var stdout = await PrepareOutputAsync(result.StdOut, MaxStdOutLength,
        idx => BlobNameForStdOut(result.ExperimentID, result.BenchmarkFileName, idx),
        outputContainer, "stdout", result.ExperimentID);
    azureResult.StdOut = stdout.Item1;
    azureResult.StdOutExtStorageIdx = stdout.Item2;

    var stderr = await PrepareOutputAsync(result.StdErr, MaxStdErrLength,
        idx => BlobNameForStdErr(result.ExperimentID, result.BenchmarkFileName, idx),
        outputContainer, "stderr", result.ExperimentID);
    azureResult.StdErr = stderr.Item1;
    azureResult.StdErrExtStorageIdx = stderr.Item2;

    return azureResult;
}

/// <summary>
/// Stores one output stream: uploads it to its own blob when it exceeds
/// <paramref name="maxLength"/>, otherwise reads it into a string.
/// </summary>
/// <param name="output">Output stream to store; position is preserved when seekable.</param>
/// <param name="maxLength">Inline size limit in bytes.</param>
/// <param name="blobNameForIndex">Maps a storage index string to a blob name.</param>
/// <param name="outputContainer">Container receiving the externalized blob.</param>
/// <param name="outputKind">"stdout" or "stderr", used only for tracing.</param>
/// <param name="experimentId">Experiment id, used only for tracing.</param>
/// <returns>(inline content or null when externalized, external storage index or "").</returns>
private static async Task<Tuple<string, string>> PrepareOutputAsync(Stream output, long maxLength,
    Func<string, string> blobNameForIndex, CloudBlobContainer outputContainer, string outputKind, int experimentId)
{
    if (output.Length > maxLength)
    {
        // Too large to inline: probe indices 0,1,2,... until CreateNew succeeds,
        // i.e. until we find a blob name for which there is no existing blob.
        int i = -1;
        string etag;
        do
        {
            ++i;
            etag = await UploadBlobAsync(outputContainer, blobNameForIndex(i.ToString()), output, UploadBlobMode.CreateNew);
        }
        while (etag == null);
        Trace.WriteLine(string.Format("Uploaded {0} for experiment {1}", outputKind, experimentId));
        return Tuple.Create((string)null, i.ToString());
    }

    string content;
    if (output.Length > 0)
    {
        long pos = 0;
        if (output.CanSeek)
        {
            pos = output.Position;
            output.Seek(0, SeekOrigin.Begin);
        }
        // BUG FIX: the previous StreamReader disposed the caller's stream, which made
        // the position-restore below dead code (CanSeek turns false on a disposed
        // stream) and handed a closed stream back to the caller. leaveOpen keeps the
        // stream alive so the position is actually restored.
        using (StreamReader sr = new StreamReader(output, System.Text.Encoding.UTF8, true, 4096, leaveOpen: true))
        {
            content = await sr.ReadToEndAsync();
        }
        if (output.CanSeek)
        {
            output.Seek(pos, SeekOrigin.Begin);
        }
    }
    else
    {
        content = "";
    }
    return Tuple.Create(content, "");
}
/// <summary>
/// Parses a CSV results table from <paramref name="stream"/> into AzureBenchmarkResult
/// records, skipping rows whose BenchmarkFileName is rejected by <paramref name="f"/>.
/// </summary>
/// <param name="expId">Experiment id stamped onto every parsed record.</param>
/// <param name="stream">CSV stream with a header row; every cell is read as a string.</param>
/// <param name="f">Optional filter on BenchmarkFileName; null keeps every row.</param>
/// <returns>Parsed results in table order, with filtered rows omitted.</returns>
/// <remarks>Any column other than the twelve well-known ones is treated as a
/// per-benchmark property and copied into the Properties dictionary. Numbers and
/// dates are parsed with the invariant culture; timing columns are in seconds.
/// Load and conversion timings are reported via Debug.Print.</remarks>
public static AzureBenchmarkResult[] LoadBenchmarks(int expId, Stream stream, ExperimentManager.BenchmarkFilter f) { DateTime before = DateTime.Now; var table = Table.Load(new StreamReader(stream), new ReadSettings(Delimiter.Comma, true, true, FSharpOption <int> .None, FSharpOption <FSharpFunc <Tuple <int, string>, FSharpOption <Type> > > .Some(FSharpFunc <Tuple <int, string>, FSharpOption <Type> > .FromConverter(tuple => FSharpOption <Type> .Some(typeof(string)))))); var load_time = (DateTime.Now - before).TotalSeconds; before = DateTime.Now; var fileName = table["BenchmarkFileName"].Rows.AsString; var acq = table["AcquireTime"].Rows.AsString; var norm = table["NormalizedRuntime"].Rows.AsString; var runtime = table["TotalProcessorTime"].Rows.AsString; var wctime = table["WallClockTime"].Rows.AsString; var mem = table["PeakMemorySizeMB"].Rows.AsString; var stat = table["Status"].Rows.AsString; var exitcode = table["ExitCode"].Rows.AsString; var stdout = table["StdOut"].Rows.AsString; var stdoutext = table["StdOutExtStorageIdx"].Rows.AsString; var stderr = table["StdErr"].Rows.AsString; var stderrext = table["StdErrExtStorageIdx"].Rows.AsString; var propColumns = (from c in table where c.Name != "BenchmarkFileName" && c.Name != "AcquireTime" && c.Name != "NormalizedRuntime" && c.Name != "TotalProcessorTime" && c.Name != "WallClockTime" && c.Name != "PeakMemorySizeMB" && c.Name != "Status" && c.Name != "ExitCode" && c.Name != "StdOut" && c.Name != "StdErr" && c.Name != "StdOutExtStorageIdx" && c.Name != "StdErrExtStorageIdx" select Tuple.Create(c.Name, c.Rows.AsString)) .ToArray(); List <AzureBenchmarkResult> results = new List <AzureBenchmarkResult>(); int num_rows = table.RowsCount; for (int i = 0; i < num_rows; i++) { if (f != null && !f(fileName[i])) { continue; } Dictionary <string, string> props = new Dictionary <string, string>(propColumns.Length); foreach (var pc in propColumns) { if (pc.Item2 != null) { props[pc.Item1] = pc.Item2[i]; } } AzureBenchmarkResult 
r = new AzureBenchmarkResult(); r.AcquireTime = DateTime.Parse(acq[i], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal); r.BenchmarkFileName = fileName[i]; r.ExitCode = string.IsNullOrEmpty(exitcode[i]) ? null : (int?)int.Parse(exitcode[i], CultureInfo.InvariantCulture); r.ExperimentID = expId; r.NormalizedCPUTime = double.Parse(norm[i], CultureInfo.InvariantCulture); r.PeakMemorySizeMB = double.Parse(mem[i], CultureInfo.InvariantCulture); r.Properties = props; r.Status = StatusFromString(stat[i]); r.StdErr = stderr[i]; r.StdErrExtStorageIdx = stderrext[i]; r.StdOut = stdout[i]; r.StdOutExtStorageIdx = stdoutext[i]; r.CPUTime = TimeSpan.FromSeconds(double.Parse(runtime[i], CultureInfo.InvariantCulture)); r.WallClockTime = TimeSpan.FromSeconds(double.Parse(wctime[i], CultureInfo.InvariantCulture)); results.Add(r); } AzureBenchmarkResult[] ra = results.ToArray(); var conv_time = (DateTime.Now - before).TotalSeconds; System.Diagnostics.Debug.Print("Job #{0}: table load time {1:n2} sec, conversion time {2:n2} sec", expId, load_time, conv_time); return(ra); }
/// <summary>
/// Sets the status of the given results to <paramref name="status"/>, re-uploads the whole
/// results table (ETag-guarded), and returns a map from each old result to its replacement.
/// </summary>
/// <param name="toModify">Results to update; each must be present in Benchmarks.</param>
/// <param name="status">The status to assign.</param>
/// <returns>Old-to-new result map (empty when every requested result already had the
/// status); null when the upload failed, e.g. lost a concurrent-update race.</returns>
/// <exception cref="ArgumentNullException">toModify is null.</exception>
/// <exception cref="ArgumentException">Some result in toModify does not belong to this
/// experiment (its map entry was never filled in).</exception>
/// <remarks>Azure-side rows are rebuilt for ALL benchmarks — unchanged ones included —
/// because the entire table is uploaded as one blob. Only after a successful upload are
/// the in-memory Benchmarks replaced and the externalOutputs dictionary re-keyed so that
/// entries keyed by a replaced result follow the new instance (iteration is over a
/// ToArray() snapshot because the dictionary is mutated in the loop).</remarks>
public override async Task <Dictionary <BenchmarkResult, BenchmarkResult> > TryUpdateStatus(IEnumerable <BenchmarkResult> toModify, ResultStatus status) { if (toModify == null) { throw new ArgumentNullException(nameof(toModify)); } var mod = new Dictionary <BenchmarkResult, BenchmarkResult>(); foreach (var oldRes in toModify) { mod.Add(oldRes, null); } if (mod.Count == 0) { return(mod); } int n = Benchmarks.Length; var newBenchmarks = (BenchmarkResult[])Benchmarks.Clone(); var newAzureBenchmarks = new AzureBenchmarkResult[n]; for (int i = 0; i < n; i++) { var b = newBenchmarks[i]; AzureBenchmarkResult azureResult; if (mod.ContainsKey(b)) { if (b.Status != status) // updating status of this result { newBenchmarks[i] = new BenchmarkResult(b.ExperimentID, b.BenchmarkFileName, b.AcquireTime, b.NormalizedCPUTime, b.CPUTime, b.WallClockTime, b.PeakMemorySizeMB, status, // <-- new status b.ExitCode, b.StdOut, b.StdErr, b.Properties); azureResult = ToAzureResult(newBenchmarks[i], TryGetExternalOutput(b)); mod[b] = newBenchmarks[i]; } else // status is as required already { azureResult = ToAzureResult(b, TryGetExternalOutput(b)); mod.Remove(b); } } else // result doesn't change { azureResult = ToAzureResult(b, TryGetExternalOutput(b)); } newAzureBenchmarks[i] = azureResult; } if (mod.Count == 0) { return(new Dictionary <BenchmarkResult, BenchmarkResult>()); // no changes } foreach (var item in mod) { if (item.Value == null) { throw new ArgumentException("Some of the given results to update do not belong to the experiment results"); } } string newEtag = await Upload(newAzureBenchmarks); if (newEtag == null) { return(null); } // Update benchmarks array etag = newEtag; Replace(newBenchmarks.ToArray()); foreach (var item in externalOutputs.ToArray()) { BenchmarkResult oldB = item.Key; BenchmarkResult newB; if (!mod.TryGetValue(oldB, out newB)) { continue; } AzureBenchmarkResult ar; if (externalOutputs.TryGetValue(oldB, out ar)) { externalOutputs.Remove(oldB); 
externalOutputs.Add(newB, ar); } } return(mod); }
/// <summary>
/// Parses a CSV results table from <paramref name="stream"/> into one
/// AzureBenchmarkResult per row (unfiltered variant).
/// </summary>
/// <param name="expId">Experiment id stamped onto every parsed record.</param>
/// <param name="stream">CSV stream with a header row; every cell is read as a string.</param>
/// <returns>One parsed result per table row, in table order.</returns>
/// <remarks>Any column other than the twelve well-known ones is treated as a
/// per-benchmark property and copied into the Properties dictionary. Numbers and
/// dates are parsed with the invariant culture; timing columns are in seconds.
/// NOTE(review): this duplicates the filtered LoadBenchmarks overload almost entirely —
/// presumably they live in different classes (this one uses NormalizedRuntime /
/// TotalProcessorTime property names); verify before attempting to unify.</remarks>
public static AzureBenchmarkResult[] LoadBenchmarks(int expId, Stream stream) { var table = Table.Load(new StreamReader(stream), new ReadSettings(Delimiter.Comma, true, true, FSharpOption <int> .None, FSharpOption <FSharpFunc <Tuple <int, string>, FSharpOption <Type> > > .Some(FSharpFunc <Tuple <int, string>, FSharpOption <Type> > .FromConverter(tuple => FSharpOption <Type> .Some(typeof(string)))))); var fileName = table["BenchmarkFileName"].Rows.AsString; var acq = table["AcquireTime"].Rows.AsString; var norm = table["NormalizedRuntime"].Rows.AsString; var runtime = table["TotalProcessorTime"].Rows.AsString; var wctime = table["WallClockTime"].Rows.AsString; var mem = table["PeakMemorySizeMB"].Rows.AsString; var stat = table["Status"].Rows.AsString; var exitcode = table["ExitCode"].Rows.AsString; var stdout = table["StdOut"].Rows.AsString; var stdoutext = table["StdOutExtStorageIdx"].Rows.AsString; var stderr = table["StdErr"].Rows.AsString; var stderrext = table["StdErrExtStorageIdx"].Rows.AsString; var propColumns = (from c in table where c.Name != "BenchmarkFileName" && c.Name != "AcquireTime" && c.Name != "NormalizedRuntime" && c.Name != "TotalProcessorTime" && c.Name != "WallClockTime" && c.Name != "PeakMemorySizeMB" && c.Name != "Status" && c.Name != "ExitCode" && c.Name != "StdOut" && c.Name != "StdErr" && c.Name != "StdOutExtStorageIdx" && c.Name != "StdErrExtStorageIdx" select Tuple.Create(c.Name, c.Rows.AsString)) .ToArray(); AzureBenchmarkResult[] results = new AzureBenchmarkResult[table.RowsCount]; for (int i = 0; i < results.Length; i++) { Dictionary <string, string> props = new Dictionary <string, string>(propColumns.Length); foreach (var pc in propColumns) { if (pc.Item2 != null) { props[pc.Item1] = pc.Item2[i]; } } results[i] = new AzureBenchmarkResult(); results[i].AcquireTime = DateTime.Parse(acq[i], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal); results[i].BenchmarkFileName = fileName[i]; results[i].ExitCode = 
string.IsNullOrEmpty(exitcode[i]) ? null : (int?)int.Parse(exitcode[i], CultureInfo.InvariantCulture); results[i].ExperimentID = expId; results[i].NormalizedRuntime = double.Parse(norm[i], CultureInfo.InvariantCulture); results[i].PeakMemorySizeMB = double.Parse(mem[i], CultureInfo.InvariantCulture); results[i].Properties = props; results[i].Status = StatusFromString(stat[i]); results[i].StdErr = stderr[i]; results[i].StdErrExtStorageIdx = stderrext[i]; results[i].StdOut = stdout[i]; results[i].StdOutExtStorageIdx = stdoutext[i]; results[i].TotalProcessorTime = TimeSpan.FromSeconds(double.Parse(runtime[i], CultureInfo.InvariantCulture)); results[i].WallClockTime = TimeSpan.FromSeconds(double.Parse(wctime[i], CultureInfo.InvariantCulture)); } return(results); }
/// <summary>
/// Sets the status of the given results to <paramref name="status"/>, re-uploads the whole
/// results table, and returns a map from each old result to its replacement (variant
/// without external-output tracking; uses a bool Upload instead of an ETag).
/// </summary>
/// <param name="toModify">Results to update; each must be present in Benchmarks.</param>
/// <param name="status">The status to assign.</param>
/// <returns>Old-to-new result map (empty when every requested result already had the
/// status); null when the upload reported failure.</returns>
/// <exception cref="ArgumentNullException">toModify is null.</exception>
/// <exception cref="ArgumentException">Some result in toModify does not belong to this
/// experiment (its map entry was never filled in).</exception>
/// <remarks>Azure-side rows are rebuilt for ALL benchmarks — unchanged ones included —
/// because the entire table is uploaded as one blob; the in-memory Benchmarks array is
/// replaced only after a successful upload.</remarks>
public override async Task <Dictionary <BenchmarkResult, BenchmarkResult> > TryUpdateStatus(IEnumerable <BenchmarkResult> toModify, ResultStatus status) { if (toModify == null) { throw new ArgumentNullException(nameof(toModify)); } var mod = new Dictionary <BenchmarkResult, BenchmarkResult>(); foreach (var oldRes in toModify) { mod.Add(oldRes, null); } if (mod.Count == 0) { return(mod); } int n = Benchmarks.Length; var newBenchmarks = (BenchmarkResult[])Benchmarks.Clone(); var newAzureBenchmarks = new AzureBenchmarkResult[n]; for (int i = 0; i < n; i++) { var b = newBenchmarks[i]; if (mod.ContainsKey(b)) { if (b.Status != status) // updating status of this result { newBenchmarks[i] = new BenchmarkResult(b.ExperimentID, b.BenchmarkFileName, b.AcquireTime, b.NormalizedRuntime, b.TotalProcessorTime, b.WallClockTime, b.PeakMemorySizeMB, status, // <-- new status b.ExitCode, b.StdOut, b.StdErr, b.Properties); newAzureBenchmarks[i] = AzureExperimentStorage.ToAzureBenchmarkResult(newBenchmarks[i]); mod[b] = newBenchmarks[i]; } else // status is as required already { newAzureBenchmarks[i] = AzureExperimentStorage.ToAzureBenchmarkResult(b); mod.Remove(b); } } else // result doesn't change { newAzureBenchmarks[i] = AzureExperimentStorage.ToAzureBenchmarkResult(b); } } if (mod.Count == 0) { return(new Dictionary <BenchmarkResult, BenchmarkResult>()); // no changes } foreach (var item in mod) { if (item.Value == null) { throw new ArgumentException("Some of the given results to update do not belong to the experiment results"); } } bool success = await Upload(newAzureBenchmarks); if (!success) { return(null); } // Update benchmarks array Replace(newBenchmarks.ToArray()); return(mod); }