/// <summary>
/// Downloads the images for the specified job.
/// </summary>
/// <example>
/// This starts an image download task:
/// <code>
/// Task.Factory.StartNew(() => new ImageDownload().Download(job, 100));
/// </code>
/// </example>
/// <param name="value">The job whose images are to be fetched.</param>
/// <param name="limit">Limit on the number of image pairs to download; defaults to all of them.</param>
public void Download(StoredJob value, int limit = int.MaxValue)
{
    // Path where images will be stored; the job folder must already exist on disk.
    string path = System.Web.Hosting.HostingEnvironment.MapPath("~/App_Data/Jobs/" + value.JobId);

    // Check that the job is actually stored on the server before starting to save images.
    if (Directory.Exists(path))
    {
        for (int i = 0; i < value.Images.Length && i < limit; i++)
        {
            // Local copy so the task lambdas capture a stable pair, not the loop index.
            var pair = value.Images[i];

            // Download both images from AWS concurrently and save them to the job's directory.
            // BUG FIX: Image2 was previously requested from Image1's bucket.
            Task image1 = Task.Factory.StartNew(() => AWS.GetObject(pair.Image1.Key, pair.Image1.Bucket, value.JobId));
            Task image2 = Task.Factory.StartNew(() => AWS.GetObject(pair.Image2.Key, pair.Image2.Bucket, value.JobId));
            Task.WaitAll(image1, image2);

            // Push the downloaded pair onto the GlobalQueue for the executable to consume.
            GlobalQueue.AddToQueue(value.JobId, pair.Image1.Key, pair.Image2.Key);
        }
    }
    else
    {
        // NOTE(review): consider logging properly and retrying after a delay instead of writing to the console.
        Console.WriteLine("Job is not stored");
    }
}
/// <summary>
/// GET api/jobprogress/id. Returns a JSON response indicating the job's progress.
/// </summary>
/// <param name="id">Job ID</param>
/// <returns>JSON JobProgress response</returns>
public JobProgress Get(int id)
{
    // Unknown or uncached jobs produce a failure payload rather than throwing.
    if (!ProcessManager.JobCached(id))
    {
        return JobProgress.CreateFailResponse("Job not found");
    }

    return JobProgress.CreateFromStored(ProcessManager.GetJob(id));
}
/// <summary>
/// Creates a Progress report object from a given StoredJob object.
/// </summary>
/// <param name="job">Job to report on</param>
/// <returns>New JobProgress object</returns>
public static JobProgress CreateFromStored(StoredJob job)
{
    JobProgress progress = new JobProgress();
    progress.JobId = job.JobId;
    progress.Completed = job.Completed;
    progress.Started = job.Started;
    progress.Paused = job.Paused;

    if (job.Completed)
    {
        progress.Progress = 1;
    }
    else if (job.Images == null || job.Images.Length == 0)
    {
        // Guard: 0f / 0f is NaN, which would serialize as an invalid progress value.
        progress.Progress = 0;
    }
    else
    {
        // Fraction of image pairs processed so far.
        progress.Progress = (float)job.BatchIndex / job.Images.Length;
    }

    // Marks the response as successful.
    progress.res = true;
    return progress;
}
/// <summary>
/// <para>Allocates jobs from the queue to parallel execution threads.</para>
/// <para>Jobs will be allocated up to the set execution limit, and are dequeued when their execution starts.</para>
/// </summary>
public static void AllocateJobs()
{
    // The loop bound is recomputed against RunningTasks on every iteration, and the
    // limit is re-checked both here and inside the body, because RunningTasks is
    // incremented as jobs start within the loop.
    // NOTE(review): RunningTasks is shared mutable static state also decremented from
    // worker tasks (see RunTask) with no visible synchronization — confirm callers
    // serialize access to AllocateJobs.
    for (int i = 0; i < (ExecutableLimit - RunningTasks) && RunningTasks < ExecutableLimit && TaskQueue.Count != 0; i++)
    {
        if (RunningTasks < ExecutableLimit)
        {
            // Dequeue the next job descriptor: (executable file name, job id).
            Tuple <string, int> values = RemoveFromQueue();
            string fileName = values.Item1;
            int jobId = values.Item2;
            StoredJob job = ProcessManager.GetJob(jobId);
            if (job == null || job.Stopped) //Don't run job if stopped externally.
            {
                // Job is missing or stopped: skip it. It has already been dequeued,
                // so it will not be retried by this pass.
            }
            else
            {
                RunningTasks += 1;
                // Start the image download and the executable runner concurrently;
                // RunTask waits on the GlobalQueue for downloaded images to arrive.
                Task.Factory.StartNew(() => new ImageDownload().Download(job));
                Task.Factory.StartNew(() => new StartTask().RunTask(fileName, jobId));
            }
        }
    }
}
/// <summary>
/// <para>POST api/job</para>
/// <para>Applies a control option (PAUSE, RESUME, STOP, RESTART) to a stored, started, uncompleted job.</para>
/// </summary>
/// <param name="value">JobControl model object input</param>
/// <returns>JSON GenericResponse indicating success or failure</returns>
public GenericResponse Post([FromBody] JobControl value)
{
    if (value == null)
    {
        return GenericResponse.Failure("Invalid request");
    }

    StoredJob job = ProcessManager.GetJob(value.JobId);

    // Guard clauses: the job must exist, be started, and not yet be completed.
    if (job == null)
    {
        return GenericResponse.Failure("Job not stored");
    }
    if (job.Completed)
    {
        return GenericResponse.Failure("Job already completed");
    }
    if (!job.Started)
    {
        return GenericResponse.Failure("Job not started");
    }

    switch (value.Option)
    {
        case "PAUSE":
            // Pausing an already-paused job is a harmless no-op.
            job.Paused = true;
            return GenericResponse.Success(value.JobId);

        case "RESUME":
            job.Paused = false;
            return GenericResponse.Success();

        case "STOP":
            job.Stopped = true;
            // Kill the running executable, if any, so the job halts promptly.
            if (job.exeProcess != null && !job.exeProcess.HasExited)
            {
                job.exeProcess.Kill();
            }
            return GenericResponse.Success();

        case "RESTART":
            // Clear the stop/pause flags and re-queue the job for the allocator.
            job.Stopped = false;
            job.Paused = false;
            JobQueue.AddToQueue(job.Command, job.JobId);
            return GenericResponse.Success();

        default:
            return GenericResponse.Failure("Option does not exist");
    }
}
/// <summary>
/// <para>Attempts to execute the given Job.</para>
/// <para>Output is stored line-by-line in "results.csv" in the App_Data/{id} folder for the job.</para>
/// <para>For each image pair, the job's executable is launched with the two image paths as
/// arguments; its stdout/stderr are captured (commas stripped) and appended as one CSV row.</para>
/// </summary>
/// <param name="fileName">Name of executable to run from the Job's .zip.</param>
/// <param name="jobId">Job's ID.</param>
public void RunTask(string fileName, int jobId)
{
    StoredJob job = ProcessManager.GetJob(jobId);
    WorkArray[] Images = job.Images;
    // Paths: the extracted executable, the downloaded images folder, and the CSV output file.
    string filePath = System.Web.Hosting.HostingEnvironment.MapPath("~/App_Data/Jobs/" + jobId + "/Extracted/" + fileName);
    string ImagePath = System.Web.Hosting.HostingEnvironment.MapPath("~/App_Data/Jobs/" + jobId + "/Images/");
    string output = System.Web.Hosting.HostingEnvironment.MapPath("~/App_Data/Jobs/" + jobId + "/results.csv");

    // Validate each image name before launching anything; abort the whole run on any bad key.
    foreach (WorkArray img in Images)
    {
        if (!ValidateImageName(img.Image1.Key) || !ValidateImageName(img.Image2.Key))
        {
            Debug.WriteLine("Invalid image hash " + img.Image1.Key + "or" + img.Image2.Key + ". Aborting executable launch.");
            return;
        }
    }

    // Configure the child process: no shell, capture stdout/stderr.
    ProcessStartInfo startInfo = new ProcessStartInfo();
    startInfo.FileName = filePath;
    startInfo.UseShellExecute = false;
    startInfo.RedirectStandardOutput = true;
    startInfo.RedirectStandardInput = false;
    startInfo.RedirectStandardError = true;
    startInfo.CreateNoWindow = false;

    // Set up the CSV header row.
    // NOTE(review): the writer is only closed on the normal and Stopped paths; an exception
    // inside the try leaves it undisposed — a using/finally would be safer. Left as-is here.
    var w = new StreamWriter(output);
    var line = string.Format("{0},{1},{2},{3}", "Image1", "Image2", "Result", "Errors");
    w.WriteLine(line);
    w.Flush();

    try
    {
        for (int i = 0; i < Images.Length; i++)
        {
            while (GlobalQueue.QueueSize(jobId) == 0 || job.Paused == true)
            {
                // Hang until there's stuff on the queue to process (downloads arrive
                // asynchronously) or the job is unpaused.
                // NOTE(review): this is a busy-wait that spins a CPU core — add a sleep/wait.
            }
            if (job.Stopped)
            {
                if (i > 0) //If i == 0 then RunningTasks hasn't been increased yet
                {
                    JobQueue.RunningTasks -= 1;
                }
                w.Close();
                //Maybe clean job of system
                return;
            }
            if (i == 0)
            {
                // First iteration: mark the job as started.
                Debug.WriteLine("Job + " + job.JobId + " started");
                job.Started = true;
            }
            job.BatchIndex = i; // Helps to give the progress of the code (see JobProgress.CreateFromStored)

            // Next pair of downloaded image file names for this job.
            Tuple <string, string> arguments = GlobalQueue.RemoveFromQueue(jobId);
            // Generate the image arguments: "<path1> <path2> ".
            startInfo.Arguments = ImagePath + arguments.Item1 + " " + ImagePath + arguments.Item2 + " ";

            using (job.exeProcess = Process.Start(startInfo))
            {
                // NOTE(review): reading stdout to end before stderr can deadlock if the child
                // fills the stderr pipe buffer — confirm the executables' output is small.
                string strOut = job.exeProcess.StandardOutput.ReadToEnd();
                string strErr = job.exeProcess.StandardError.ReadToEnd();
                job.exeProcess.WaitForExit();

                // Save the results
                job.ExitCode = job.exeProcess.ExitCode;

                //Write to csv files
                string first = Images[i].Image1.Key;
                string second = Images[i].Image2.Key;

                // Strip commas from the captured output so the CSV row stays well-formed.
                String[] strOutArray = strOut.Split(',');
                String[] strErrArray = strErr.Split(',');
                strOut = String.Join("", strOutArray);
                strErr = String.Join("", strErrArray);

                line = string.Format("{0},{1},{2},{3}", first, second, strOut, strErr).Trim();
                w.WriteLine(line);
                w.Flush();
            }
        }
        w.Close();
        job.Completed = true; //Signifies that the job is now complete

        //Given upload destination is currently the jobId
        JobQueue.RunningTasks -= 1; // Frees up space for the next running executable
        Action b = delegate
        {
            UploadQueue.AddToQueue(output, "citizen.science.image.storage.public", job.JobId.ToString());
        };
        b();
        Debug.WriteLine("Uploaded:" + jobId);
        // Let the allocator start the next queued job in the freed slot.
        JobQueue.AllocateJobs();
    }
    catch (Exception e)
    {
        // NOTE(review): this swallows all failures after a debug line; RunningTasks is not
        // decremented on this path, which can leak an execution slot — verify intent.
        Debug.WriteLine("Job execution failed: " + e.Message);
    }
}