/// <summary>
/// Retrieves the benchmark results of an experiment, preferring the local
/// disk cache and falling back to a blob-storage download on a cache miss.
/// </summary>
/// <param name="experimentId">Identifier of the experiment whose results are requested.</param>
/// <param name="f">Optional benchmark-name filter; null keeps all rows.</param>
/// <returns>
/// The results paired with the blob's ETag; an empty array and a null ETag
/// when the results blob does not exist (HTTP 404).
/// </returns>
public async Task<Tuple<AzureBenchmarkResult[], string>> GetAzureExperimentResults(ExperimentID experimentId, ExperimentManager.BenchmarkFilter f = null)
{
    CloudBlob blob = resultsContainer.GetBlobReference(GetResultBlobName(experimentId));
    try
    {
        // Try the disk cache first; a miss returns null and we download instead.
        AzureBenchmarkResult[] results =
            GetFromCache(experimentId, blob, f) ?? await GetFromStorage(experimentId, blob, f);
        return Tuple.Create(results, blob.Properties.ETag);
    }
    catch (StorageException ex) when (ex.RequestInformation.HttpStatusCode == 404) // Not found == no results
    {
        return Tuple.Create(new AzureBenchmarkResult[0], (string)null);
    }
}
/// <summary>
/// Downloads the zipped results blob of an experiment, loads the benchmark
/// table it contains, and (best-effort) stores an unzipped copy in the local
/// disk cache for subsequent <c>GetFromCache</c> hits.
/// </summary>
/// <param name="id">Identifier of the experiment.</param>
/// <param name="blob">Blob holding the zipped results.</param>
/// <param name="f">Optional benchmark-name filter; null keeps all rows.</param>
/// <returns>The benchmark results parsed from the downloaded archive.</returns>
protected async Task<AzureBenchmarkResult[]> GetFromStorage(ExperimentID id, CloudBlob blob, ExperimentManager.BenchmarkFilter f = null)
{
    using (MemoryStream zipStream = new MemoryStream(1 << 16))
    {
        // BUGFIX: was DateTime.Now. GetFromCache compares the cache file's
        // UTC write time (File.GetLastWriteTimeUtc) against the blob's
        // LastModified, so the timestamp stamped below must be UTC —
        // stamping local time skewed the freshness check by the UTC offset.
        DateTime downloadStartedUtc = DateTime.UtcNow;

        await blob.DownloadToStreamAsync(zipStream, AccessCondition.GenerateEmptyCondition(),
            new Microsoft.WindowsAzure.Storage.Blob.BlobRequestOptions
            {
                RetryPolicy = new Microsoft.WindowsAzure.Storage.RetryPolicies.ExponentialRetry(TimeSpan.FromMilliseconds(100), 25)
            }, null);

        AzureBenchmarkResult[] res = null;
        zipStream.Position = 0;
        using (ZipArchive zip = new ZipArchive(zipStream, ZipArchiveMode.Read))
        {
            string rfn = GetResultsFileName(id);
            var entry = zip.GetEntry(rfn);
            res = AzureBenchmarkResult.LoadBenchmarks(id, entry.Open(), f);

            DateTime before_cache = DateTime.Now;
            try
            {
                // If possible, save to cache.
                string dir = Path.Combine(Path.GetTempPath(), "z3nightly-results");
                Directory.CreateDirectory(dir);
                string filename = Path.Combine(dir, rfn);
                // BUGFIX: was FileMode.OpenOrCreate, which does not truncate an
                // existing file — a shorter new payload would leave stale
                // trailing bytes and corrupt the cached CSV. Create truncates.
                using (FileStream file = File.Open(filename, FileMode.Create, FileAccess.Write))
                using (var e = entry.Open())
                    await e.CopyToAsync(file);
                // Stamp the pre-download time so the cache is only considered
                // fresh while the blob has not been modified since.
                File.SetLastWriteTimeUtc(filename, downloadStartedUtc);
            }
            catch (Exception ex)
            {
                // Cache writes are best-effort; failures must not break the read path.
                Debug.Print("Exception caught while saving to cache: {0}", ex.Message);
                Debug.Print("Stack Trace: " + ex.StackTrace);
            }
            Debug.Print("Job #{0}: cache save time: {1:n2} sec", id, (DateTime.Now - before_cache).TotalSeconds);
        }
        return res;
    }
}
/// <summary>
/// Fetches an experiment's benchmark results and wraps them, together with
/// the blob's ETag, in an <see cref="AzureExperimentResults"/> instance.
/// </summary>
/// <param name="experimentId">Identifier of the experiment.</param>
/// <param name="f">Optional benchmark-name filter; null keeps all rows.</param>
public async Task<AzureExperimentResults> GetResults(ExperimentID experimentId, ExperimentManager.BenchmarkFilter f = null)
{
    Tuple<AzureBenchmarkResult[], string> resultsAndEtag = await GetAzureExperimentResults(experimentId, f);
    return new AzureExperimentResults(this, experimentId, resultsAndEtag.Item1, resultsAndEtag.Item2);
}
/// <summary>
/// Attempts to load an experiment's results from the local disk cache.
/// The cache is used only when the file's UTC write time is newer than the
/// blob's last-modified time. Any failure is logged and treated as a miss.
/// </summary>
/// <param name="id">Identifier of the experiment.</param>
/// <param name="blob">Blob the cached file is validated against.</param>
/// <param name="f">Optional benchmark-name filter; null keeps all rows.</param>
/// <returns>The cached results, or null on a cache miss or any error.</returns>
protected AzureBenchmarkResult[] GetFromCache(ExperimentID id, CloudBlob blob, ExperimentManager.BenchmarkFilter f = null)
{
    DateTime started = DateTime.Now;
    AzureBenchmarkResult[] cached = null;
    try
    {
        blob.FetchAttributes(); // refresh Properties.LastModified for the staleness check
        string cacheDir = Path.Combine(Path.GetTempPath(), "z3nightly-results");
        Directory.CreateDirectory(cacheDir);
        string cachePath = Path.Combine(cacheDir, GetResultsFileName(id));

        bool isFresh = File.Exists(cachePath)
                       && blob.Properties.LastModified.HasValue
                       && File.GetLastWriteTimeUtc(cachePath) > blob.Properties.LastModified.Value;
        if (isFresh)
        {
            using (var stream = new FileStream(cachePath, FileMode.Open))
            {
                cached = AzureBenchmarkResult.LoadBenchmarks(id, stream, f);
                Debug.Print("Job #{0}: cache hit, load time: {1:n2} sec", id, (DateTime.Now - started).TotalSeconds);
            }
        }
    }
    catch (Exception ex)
    {
        // Best-effort cache: swallow and fall back to storage (caller treats null as a miss).
        Debug.Print("Exception caught while reading from cache: " + ex.Message);
        Debug.Print("Stack Trace: " + ex.StackTrace);
    }
    return cached;
}
/// <summary>
/// Parses a CSV benchmark-results table from a stream into an array of
/// <see cref="AzureBenchmarkResult"/>, optionally keeping only the rows whose
/// benchmark file name passes the filter.
/// </summary>
/// <param name="expId">Experiment identifier stamped on every result.</param>
/// <param name="stream">Stream containing the comma-delimited results table.</param>
/// <param name="f">Optional benchmark-name filter; null keeps all rows.</param>
/// <returns>One result per accepted row.</returns>
public static AzureBenchmarkResult[] LoadBenchmarks(int expId, Stream stream, ExperimentManager.BenchmarkFilter f)
{
    DateTime loadStart = DateTime.Now;
    // Load every column as a raw string; typed parsing happens per-field below.
    var table = Table.Load(new StreamReader(stream),
        new ReadSettings(Delimiter.Comma, true, true, FSharpOption<int>.None,
            FSharpOption<FSharpFunc<Tuple<int, string>, FSharpOption<Type>>>.Some(
                FSharpFunc<Tuple<int, string>, FSharpOption<Type>>.FromConverter(
                    tuple => FSharpOption<Type>.Some(typeof(string))))));
    var load_time = (DateTime.Now - loadStart).TotalSeconds;

    DateTime convStart = DateTime.Now;
    var fileNames = table["BenchmarkFileName"].Rows.AsString;
    var acquireTimes = table["AcquireTime"].Rows.AsString;
    var normalizedTimes = table["NormalizedRuntime"].Rows.AsString;
    var cpuTimes = table["TotalProcessorTime"].Rows.AsString;
    var wallClockTimes = table["WallClockTime"].Rows.AsString;
    var memorySizes = table["PeakMemorySizeMB"].Rows.AsString;
    var statuses = table["Status"].Rows.AsString;
    var exitCodes = table["ExitCode"].Rows.AsString;
    var stdouts = table["StdOut"].Rows.AsString;
    var stdoutExts = table["StdOutExtStorageIdx"].Rows.AsString;
    var stderrs = table["StdErr"].Rows.AsString;
    var stderrExts = table["StdErrExtStorageIdx"].Rows.AsString;

    // Every column that is not one of the well-known ones above is treated as
    // a custom experiment property and copied into the per-row property bag.
    string[] knownColumns =
    {
        "BenchmarkFileName", "AcquireTime", "NormalizedRuntime", "TotalProcessorTime",
        "WallClockTime", "PeakMemorySizeMB", "Status", "ExitCode",
        "StdOut", "StdErr", "StdOutExtStorageIdx", "StdErrExtStorageIdx"
    };
    var propColumns = table
        .Where(c => !knownColumns.Contains(c.Name))
        .Select(c => Tuple.Create(c.Name, c.Rows.AsString))
        .ToArray();

    var results = new List<AzureBenchmarkResult>();
    int rowCount = table.RowsCount;
    for (int row = 0; row < rowCount; row++)
    {
        if (f != null && !f(fileNames[row]))
        {
            continue; // filtered out by benchmark name
        }

        var props = new Dictionary<string, string>(propColumns.Length);
        foreach (var column in propColumns)
        {
            if (column.Item2 != null)
            {
                props[column.Item1] = column.Item2[row];
            }
        }

        var r = new AzureBenchmarkResult();
        r.AcquireTime = DateTime.Parse(acquireTimes[row], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal);
        r.BenchmarkFileName = fileNames[row];
        // Empty ExitCode cell means "no exit code recorded".
        r.ExitCode = string.IsNullOrEmpty(exitCodes[row]) ? null : (int?)int.Parse(exitCodes[row], CultureInfo.InvariantCulture);
        r.ExperimentID = expId;
        r.NormalizedCPUTime = double.Parse(normalizedTimes[row], CultureInfo.InvariantCulture);
        r.PeakMemorySizeMB = double.Parse(memorySizes[row], CultureInfo.InvariantCulture);
        r.Properties = props;
        r.Status = StatusFromString(statuses[row]);
        r.StdErr = stderrs[row];
        r.StdErrExtStorageIdx = stderrExts[row];
        r.StdOut = stdouts[row];
        r.StdOutExtStorageIdx = stdoutExts[row];
        r.CPUTime = TimeSpan.FromSeconds(double.Parse(cpuTimes[row], CultureInfo.InvariantCulture));
        r.WallClockTime = TimeSpan.FromSeconds(double.Parse(wallClockTimes[row], CultureInfo.InvariantCulture));
        results.Add(r);
    }

    AzureBenchmarkResult[] ra = results.ToArray();
    var conv_time = (DateTime.Now - convStart).TotalSeconds;
    System.Diagnostics.Debug.Print("Job #{0}: table load time {1:n2} sec, conversion time {2:n2} sec", expId, load_time, conv_time);
    return ra;
}