/// <summary>
/// Download one or more files and process the .zip and .db files.
/// </summary>
/// <param name="outputs">Blobs (job outputs) to be downloaded.</param>
/// <param name="ShowProgress">Function which reports progress (in range [0, 1]) to the user.</param>
/// <param name="resultTempDirectory">Directory into which the files are downloaded and extracted.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Task representing the download/extract/merge operation.</returns>
private static async Task DownloadAndProcessFiles(List<CloudBlockBlob> outputs, Action<double> ShowProgress, string resultTempDirectory, CancellationToken ct)
{
    // Download all run outputs.
    for (int i = 0; i < outputs.Count; i++)
    {
        // Bug fix: the original used integer division (i / outputs.Count), which
        // always reported 0. Cast to double so progress is reported as a fraction.
        ShowProgress((double)i / outputs.Count);
        CloudBlockBlob blob = outputs[i];

        // Check for cancellation before starting each download, not only after it.
        if (ct.IsCancellationRequested)
            return;

        // todo: Download in parallel?
        var fullFileName = Path.Combine(resultTempDirectory, blob.Name);
        await blob.DownloadToFileAsync(fullFileName, FileMode.Create, ct);

        if (ct.IsCancellationRequested)
            return;
    }
    // Report completion as 1 (fraction), consistent with the [0, 1] range used
    // by DownloadResultsAsync. The original passed 100 here.
    ShowProgress(1);

    // Extract any zip files (the model itself, model.zip, is left alone).
    foreach (var zipFileName in Directory.GetFiles(resultTempDirectory, "*.zip"))
    {
        if (Path.GetFileName(zipFileName) != "model.zip")
        {
            // Extract the result files.
            using (ZipArchive zip = ZipFile.Open(zipFileName, ZipArchiveMode.Read, Encoding.UTF8))
                zip.ExtractToDirectory(resultTempDirectory);
        }
    }

    try
    {
        // Merge results into a single .db file.
        var dbFiles = Directory.GetFiles(resultTempDirectory, "*.db");
        var resultsDB = Path.Combine(resultTempDirectory, "Results.db");
        DBMerger.MergeFiles(Path.Combine(resultTempDirectory, "*.db"), false, resultsDB);

        if (File.Exists(resultsDB))
        {
            // Remove the individual .db files.
            foreach (string dbFileName in dbFiles)
                File.Delete(dbFileName);

            // Delete the zip files containing the .db files (but not model.zip).
            foreach (var zipFileName in Directory.GetFiles(resultTempDirectory, "*.zip"))
            {
                if (Path.GetFileName(zipFileName) != "model.zip")
                    File.Delete(zipFileName);
            }
        }
    }
    catch (Exception err)
    {
        // Typo fix in the original message ("wasn encountered").
        throw new Exception($"Results were successfully extracted to {resultTempDirectory} but an error was encountered while attempting to merge the individual .db files", err);
    }
}
/// <summary>
/// Download the results of a job.
/// </summary>
/// <param name="options">Download options.</param>
/// <param name="ct">Cancellation token.</param>
/// <param name="ShowProgress">Function which reports progress (in range [0, 1]) to the user.</param>
public async Task DownloadResultsAsync(DownloadOptions options, CancellationToken ct, Action<double> ShowProgress)
{
    if (!Directory.Exists(options.Path))
        Directory.CreateDirectory(options.Path);

    List<CloudBlockBlob> outputs = await GetJobOutputs(options.JobID, ct);

    // Build up a list of files to download.
    List<CloudBlockBlob> toDownload = new List<CloudBlockBlob>();
    CloudBlockBlob results = outputs.Find(b => string.Equals(b.Name, resultsFileName, StringComparison.InvariantCultureIgnoreCase));
    if (results != null)
        toDownload.Add(results);
    else
        // Always download debug files if no results archive can be found.
        options.DownloadDebugFiles = true;

    if (options.DownloadDebugFiles)
        toDownload.AddRange(outputs.Where(blob => debugFileFormats.Contains(Path.GetExtension(blob.Name.ToLower()))));

    // Now download the necessary files.
    for (int i = 0; i < toDownload.Count; i++)
    {
        // Bug fix: the original used integer division (i / toDownload.Count),
        // which always reported 0. Cast to double to report a true fraction.
        ShowProgress((double)i / toDownload.Count);
        CloudBlockBlob blob = toDownload[i];

        // todo: Download in parallel?
        await blob.DownloadToFileAsync(Path.Combine(options.Path, blob.Name), FileMode.Create, ct);

        if (ct.IsCancellationRequested)
            return;
    }
    // Report completion as 1, per the documented [0, 1] range. The original
    // passed 100 here, contradicting its own doc comment.
    ShowProgress(1);

    if (options.ExtractResults)
    {
        string archive = Path.Combine(options.Path, resultsFileName);
        string resultsDir = Path.Combine(options.Path, "results");
        if (File.Exists(archive))
        {
            // Extract the result files.
            using (ZipArchive zip = ZipFile.Open(archive, ZipArchiveMode.Read, Encoding.UTF8))
                zip.ExtractToDirectory(resultsDir);

            try
            {
                // Merge results into a single .db file. Bug fix: the original
                // passed the bare relative path "combined.db", so the merged
                // file landed in the process working directory rather than
                // alongside the extracted results.
                DBMerger.MergeFiles(Path.Combine(resultsDir, "*.db"), false, Path.Combine(resultsDir, "combined.db"));
            }
            catch (Exception err)
            {
                // Typo fix in the original message ("wasn encountered").
                throw new Exception($"Results were successfully extracted to {resultsDir} but an error was encountered while attempting to merge the individual .db files", err);
            }

            // TBI: merge into csv file.
            if (options.ExportToCsv)
                throw new NotImplementedException();
        }
    }
}
/// <summary>
/// Main program entry point.
/// </summary>
/// <param name="args">Command line arguments.</param>
/// <returns>Program exit code (0 for success).</returns>
public static int Main(string[] args)
{
    // Bail out early with a usage message when help was requested or the
    // argument count is outside the accepted range.
    if (args.Contains("/?") || args.Length < 1 || args.Length > 10)
    {
        WriteUsageMessage();
        return 1;
    }

    arguments = args;

    try
    {
        if (version)
        {
            WriteVersion();
        }
        else if (upgrade)
        {
            UpgradeFile(fileName, recurse);
        }
        else if (edit)
        {
            EditFile(fileName, recurse);
        }
        else if (mergeDBFiles)
        {
            DBMerger.MergeFiles(fileName, recurse, Path.Combine(Path.GetDirectoryName(fileName), "merged.db"));
        }
        else
        {
            // Default action: run the simulations described by the input file(s).
            Runner runner = new Runner(fileName, ignorePaths, recurse, runTests, runType,
                                       numberOfProcessors: numberOfProcessors,
                                       simulationNamePatternMatch: simulationNameRegex);
            runner.SimulationCompleted += OnJobCompleted;
            runner.SimulationGroupCompleted += OnSimulationGroupCompleted;
            runner.AllSimulationsCompleted += OnAllJobsCompleted;
            runner.Run();

            // If errors occurred, write them to the console.
            if (exitCode != 0)
                Console.WriteLine("ERRORS FOUND!!");

            if (verbose)
                Console.WriteLine("Elapsed time was " + runner.ElapsedTime.TotalSeconds.ToString("F1") + " seconds");
        }
    }
    catch (Exception err)
    {
        Console.WriteLine(err.ToString());
        exitCode = 1;
    }

    return exitCode;
}
/// <summary>
/// Run Models with the given set of options.
/// </summary>
/// <param name="options">Parsed command-line options controlling what to do.</param>
public static void Run(Options options)
{
    try
    {
        // Expand each file spec (optionally recursively) into concrete paths.
        string[] files = options.Files.SelectMany(f => DirectoryUtilities.FindFiles(f, options.Recursive)).ToArray();

        // Note: SelectMany(...).ToArray() never returns null, so only the
        // empty case needs checking (the original also tested for null).
        if (files.Length < 1)
            throw new ArgumentException("No files were specified");
        if (options.NumProcessors == 0)
            throw new ArgumentException("Number of processors cannot be 0");

        if (options.Upgrade)
        {
            // Upgrade each file to the latest file format version.
            foreach (string file in files)
            {
                UpgradeFile(file);
                if (options.Verbose)
                    Console.WriteLine("Successfully upgraded " + file);
            }
        }
        else if (options.ListSimulationNames)
        {
            foreach (string file in files)
                ListSimulationNames(file, options.SimulationNameRegex);
        }
        else if (options.MergeDBFiles)
        {
            // Merge the .db file corresponding to each input file into merged.db
            // in the directory of the first file.
            string[] dbFiles = files.Select(f => Path.ChangeExtension(f, ".db")).ToArray();
            string outFile = Path.Combine(Path.GetDirectoryName(dbFiles[0]), "merged.db");
            DBMerger.MergeFiles(dbFiles, outFile);
        }
        else
        {
            Runner runner;
            if (string.IsNullOrEmpty(options.EditFilePath))
            {
                // Run simulations as-is.
                runner = new Runner(files,
                                    options.RunTests,
                                    options.RunType,
                                    numberOfProcessors: options.NumProcessors,
                                    simulationNamePatternMatch: options.SimulationNameRegex);
            }
            else
            {
                // Apply the edit file to each input file before running.
                runner = new Runner(files.Select(f => EditFile.Do(f, options.EditFilePath)),
                                    true,
                                    true,
                                    options.RunTests,
                                    runType: options.RunType,
                                    numberOfProcessors: options.NumProcessors,
                                    simulationNamePatternMatch: options.SimulationNameRegex);
            }

            runner.SimulationCompleted += OnJobCompleted;
            if (options.Verbose)
                runner.SimulationCompleted += WriteCompleteMessage;
            if (options.ExportToCsv)
                runner.SimulationGroupCompleted += OnSimulationGroupCompleted;
            runner.AllSimulationsCompleted += OnAllJobsCompleted;

            runner.Run();

            // If errors occurred, write them to the console.
            if (exitCode != 0)
                Console.WriteLine("ERRORS FOUND!!");

            if (options.Verbose)
                Console.WriteLine("Elapsed time was " + runner.ElapsedTime.TotalSeconds.ToString("F1") + " seconds");
        }
    }
    catch (Exception err)
    {
        Console.WriteLine(err.ToString());
        exitCode = 1;
    }
}