Example 1
        // POST api/jobs
        public IHttpActionResult Post([FromBody] Job job)
        {
            try
            {
                // FileName, FileNameKey and PresetId are all required
                if (string.IsNullOrEmpty(job.FileName))
                {
                    return BadRequest("FileName is required");
                }

                if (string.IsNullOrEmpty(job.FileNameKey))
                {
                    return BadRequest("FileNameKey is required");
                }

                if (string.IsNullOrEmpty(job.PresetId))
                {
                    return BadRequest("PresetId is required");
                }


                // Housekeeping: when a new job is added, delete the oldest one so we stay within the configured limit
                var oldestJob = _transcoderService.GetOldestJob();
                if (oldestJob != null)
                {
                    // get the pipeline so we can get the in / out bucket names
                    var pipeLine = _transcoderService.Pipeline().Pipeline;

                    // AWS doesn't have an endpoint for removing a job, only for
                    // cancelling one, and that's only allowed while it's still in the "Submitted" status
                    if (oldestJob.Status == "Submitted")
                    {
                        _transcoderService.CancelJob(oldestJob.Id);
                    }

                    // remove the old job's input / output files from both buckets
                    _storageService.RemoveFilesStartWith(oldestJob.Input.Key.WithoutExtension(), pipeLine.InputBucket);

                    _storageService.RemoveFilesStartWith(oldestJob.Output.Key.WithoutExtension(), pipeLine.OutputBucket);
                }

                _transcoderService.CreateJob(job.FileName, job.FileNameKey, job.Rotate, job.Thumbnails, job.PresetId);

                return Ok();
            }
            catch (Exception ex)
            {
                return InternalServerError(ex);
            }
        }
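
The Job model bound from the request body is not shown in this example. A minimal sketch of what it might look like, inferred purely from the properties the action reads (the names are taken from the code above, but the types of Rotate and Thumbnails are assumptions):

        // Hypothetical request model inferred from the Post action above;
        // the real class and its property types may differ.
        public class Job
        {
            public string FileName    { get; set; } // original (vanity) file name
            public string FileNameKey { get; set; } // S3 key returned by the upload endpoint
            public string PresetId    { get; set; } // Elastic Transcoder preset to use
            public string Rotate      { get; set; } // rotation option, e.g. "auto" (assumed type)
            public bool   Thumbnails  { get; set; } // whether to generate thumbnails (assumed type)
        }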
        public ActionResult Download(string id)
        {
            var job = _transcoderService.JobById(id).Job;
            var pipeLineResponse = _transcoderService.Pipeline();

            var files = _storageService.FilesStartWith(job.Output.Key.WithoutExtension(), pipeLineResponse.Pipeline.OutputBucket);

            if (files.Length == 0)
            {
                // Redirect them back to the list if the files don't exist
                return RedirectToAction("Index");
            }

            // Create a zip file based on the output files
            var zipFile = new ZipFile();
            var stream  = new MemoryStream();

            // Extract the original vanity name used for the file.
            var vanityName =
                (job.UserMetadata.ContainsKey("name")
                    ? job.UserMetadata["name"]
                    : job.Output.Key).WithoutExtension();

            foreach (var file in files)
            {
                // replace the S3 key prefix in each file name with the original vanity name
                zipFile.AddEntry(file.Name.Replace(job.Output.Key.WithoutExtension(), vanityName), file.OpenRead());
            }


            zipFile.Save(stream);
            stream.Position = 0;

            return File(stream, MediaTypeNames.Application.Octet, $"{vanityName}.zip");
        }
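
Both actions above call a WithoutExtension() helper on S3 keys and file names; it is not a BCL method, so it is presumably a small string extension defined elsewhere in the project. A minimal sketch, assuming it simply strips the file extension:

        // Hypothetical helper; the project's real extension may behave differently.
        public static class StringExtensions
        {
            public static string WithoutExtension(this string value)
            {
                // "video.mp4" -> "video"; values without a '.' are returned unchanged
                var index = value.LastIndexOf('.');
                return index < 0 ? value : value.Substring(0, index);
            }
        }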
Example 3
        public async Task<IHttpActionResult> Post()
        {
            try
            {
                // Work-around for reading files uploaded via an AJAX POST (thanks, Microsoft)
                var provider = new MultipartMemoryStreamProvider();
                await Request.Content.ReadAsMultipartAsync(provider);

                // extract file name and file contents
                var fileNameParam = provider.Contents[0].Headers.ContentDisposition.Parameters
                                    .FirstOrDefault(p => p.Name.ToLower() == "filename");

                var fileName = fileNameParam?.Value.Trim('"') ?? "";
                var file     = await provider.Contents[0].ReadAsStreamAsync();

                if (file.Length == 0 || string.IsNullOrEmpty(fileName))
                {
                    return BadRequest("No file was provided");
                }

                // Get the input S3 bucket name set on the pipeline config
                var pipelineResponse = _transcoderService.Pipeline();

                // Create a unique key and use it as the name of the file being uploaded
                var key         = Guid.NewGuid().ToString();
                var fileNameKey = key + Path.GetExtension(fileName);
                _storageService.UploadFile(file, fileNameKey, pipelineResponse.Pipeline.InputBucket);

                return Ok(fileNameKey); // return the unique key so it can be used when creating the job
            }
            catch (Exception ex)
            {
                return InternalServerError(ex);
            }
        }
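
Putting the two Web API actions together, a caller would first POST the file to get back the generated S3 key, then use that key to create the transcoding job. A rough client-side sketch using HttpClient (the route URLs, the anonymous job payload and the preset id are assumptions; PostAsJsonAsync comes from the Microsoft.AspNet.WebApi.Client package):

        // Hypothetical caller; adjust routes and payload to match the actual API.
        public static async Task UploadAndTranscodeAsync(string baseUrl, string filePath)
        {
            using (var client = new HttpClient { BaseAddress = new Uri(baseUrl) })
            {
                // 1. Upload the file; the action returns the unique fileNameKey
                var content = new MultipartFormDataContent();
                content.Add(new StreamContent(File.OpenRead(filePath)),
                            "file", Path.GetFileName(filePath));

                var uploadResponse = await client.PostAsync("api/files", content);
                uploadResponse.EnsureSuccessStatusCode();
                var fileNameKey = (await uploadResponse.Content.ReadAsStringAsync()).Trim('"');

                // 2. Create the transcoding job using the returned key
                var job = new
                {
                    FileName    = Path.GetFileName(filePath),
                    FileNameKey = fileNameKey,
                    PresetId    = "1351620000001-000010", // example preset id (assumed)
                    Rotate      = "auto",
                    Thumbnails  = true
                };

                var jobResponse = await client.PostAsJsonAsync("api/jobs", job);
                jobResponse.EnsureSuccessStatusCode();
            }
        }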