/// <summary>
/// Streams a zip archive of a transcode job's output files back to the client.
/// Files are looked up in the pipeline's output bucket by the job's output key
/// prefix; S3-keyed file names are renamed back to the user's original
/// ("vanity") name inside the archive.
/// </summary>
/// <param name="id">The transcoder job id.</param>
/// <returns>
/// A zip file download, or a redirect to <c>Index</c> when no output files exist
/// (e.g. the job has not produced anything yet).
/// </returns>
public ActionResult Download(string id)
{
    var job = _transcoderService.JobById(id).Job;
    var pipeLineResponse = _transcoderService.Pipeline();
    var files = _storageService.FilesStartWith(job.Output.Key.WithoutExtension(), pipeLineResponse.Pipeline.OutputBucket);
    if (files.Length == 0)
    {
        // Redirect them back to the list if the files don't exist
        return RedirectToAction("Index");
    }

    // Extract the original vanity name used for the file.
    var vanityName = (job.UserMetadata.ContainsKey("name") ? job.UserMetadata["name"] : job.Output.Key).WithoutExtension();

    // NOTE: the MemoryStream is deliberately not disposed here — the
    // FileStreamResult returned by File(...) takes ownership and disposes it
    // after the response body has been written.
    var stream = new MemoryStream();

    // The original implementation leaked the ZipFile and every OpenRead()
    // stream. The source streams must stay open until Save() runs (the zip
    // library reads them lazily), so they are tracked and disposed afterwards.
    var sources = new Stream[files.Length];
    try
    {
        using (var zipFile = new ZipFile())
        {
            for (var i = 0; i < files.Length; i++)
            {
                var file = files[i];
                sources[i] = file.OpenRead();
                // replace S3 keyed file names back with the original vanity name
                zipFile.AddEntry(file.Name.Replace(job.Output.Key.WithoutExtension(), vanityName), sources[i]);
            }

            zipFile.Save(stream);
        }
    }
    finally
    {
        foreach (var source in sources)
        {
            source?.Dispose();
        }
    }

    // Rewind so the response starts reading the archive from the beginning.
    stream.Position = 0;
    return File(stream, MediaTypeNames.Application.Octet, $"{vanityName}.zip");
}
// DELETE api/jobs/5
/// <summary>
/// Cancels a transcode job and removes its associated input/output files from
/// the pipeline's buckets. Only jobs still in the "Submitted" state can be
/// canceled; anything further along is left untouched.
/// </summary>
/// <param name="id">The transcoder job id.</param>
/// <returns>
/// 200 OK on success (or with an explanatory message when the job is no longer
/// cancelable); 500 with the exception details if anything throws.
/// </returns>
public IHttpActionResult Delete(string id)
{
    try
    {
        // Fetch the job up front — its input/output keys are needed below to
        // locate the files to clean up.
        var transcodeJob = _transcoderService.JobById(id).Job;

        // The pipeline carries the input / output bucket names.
        var pipelineInfo = _transcoderService.Pipeline().Pipeline;

        if (transcodeJob.Status != "Submitted")
        {
            return Ok("The job is currently processing or completed and not allowed to be canceled.");
        }

        _transcoderService.CancelJob(id);

        // Using the keys from the job, purge both the source upload and any
        // transcoded output that may already exist.
        _storageService.RemoveFilesStartWith(transcodeJob.Input.Key.WithoutExtension(), pipelineInfo.InputBucket);
        _storageService.RemoveFilesStartWith(transcodeJob.Output.Key.WithoutExtension(), pipelineInfo.OutputBucket);

        return Ok();
    }
    catch (Exception ex)
    {
        return InternalServerError(ex);
    }
}