        public async Task<IActionResult> Crop([FromQuery] ImageCropInputModel imageCropInputModel)
        {
            if (!ModelState.IsValid)
            {
                return BadRequest(ModelState);
            }

            var modifiedImage = await _imageProcessor.CropImage(imageCropInputModel);

            return File(modifiedImage.ImageAsBytes, modifiedImage.ImageFormat);
        }
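
        // For context, a minimal sketch of what the input model bound by the Crop
        // action might look like. Every property name here is an assumption for
        // illustration; the real ImageCropInputModel is defined elsewhere in the
        // codebase and may differ.
        public class ImageCropInputModel
        {
            public string ImagePath { get; set; } // hypothetical: source image to crop
            public int Left { get; set; }         // hypothetical: crop rectangle X origin
            public int Top { get; set; }          // hypothetical: crop rectangle Y origin
            public int Width { get; set; }        // hypothetical: crop rectangle width
            public int Height { get; set; }       // hypothetical: crop rectangle height
        }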
        /// <summary>
        /// Detect faces in an image using the Azure Face service, identifying them
        /// against (or adding them to) the Azure PersonDirectory.
        /// </summary>
        /// <param name="imagePath">Path of the image to scan for faces.</param>
        /// <param name="imageProcessor">Image processor used to crop out each detected face.</param>
        /// <returns>List of detected faces; empty if none were found or Azure is disabled.</returns>
        public async Task<List<Face>> DetectFaces(string imagePath, IImageProcessor imageProcessor)
        {
            var faces = new List<Face>();

            if (_transThrottle.Disabled)
            {
                Logging.LogWarning("Azure has reached its maximum monthly transactions, so the service is disabled.");
                return faces;
            }

            if (_faceClient != null && _detectionType != AzureDetection.Disabled)
            {
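                // Note: this Stopwatch is a custom timing helper (it takes a name and
                // exposes ElapsedTime below), not System.Diagnostics.Stopwatch.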
                var watch = new Stopwatch("AzureFace");

                try
                {
                    var detectedFaces = await AzureDetect(imagePath);

                    if (detectedFaces != null && detectedFaces.Any())
                    {
                        faces = await IdentifyOrCreateFaces(detectedFaces);

                        foreach (var face in faces)
                        {
                            // Hopefully they'll improve this....
                            // https://docs.microsoft.com/en-us/answers/questions/494886/azure-faceclient-persondirectory-api-usage.html
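                            // The PersonDirectory API needs the face image itself, so
                            // crop each detected face from the source and upload it.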

                            using MemoryStream stream = new MemoryStream();
                            await imageProcessor.CropImage(new FileInfo(imagePath), face.Left, face.Top, face.Width, face.Height, stream);

                            // A freshly-created MemoryStream is never null, so check
                            // whether the crop actually wrote any data to it.
                            if (stream.Length > 0)
                            {
                                // Rewind so Azure reads the cropped image from the start.
                                stream.Position = 0;

                                var persistedFace = await _transThrottle.Call("AddFace", _faceClient.PersonDirectory.AddPersonFaceFromStreamAsync(
                                                                                  face.PersonId.ToString(),
                                                                                  image: stream,
                                                                                  recognitionModel: RECOGNITION_MODEL,
                                                                                  detectionModel: DETECTION_MODEL));
                            }
                            else
                            {
                                Logging.Log($"Unable to crop image for Azure: no supported image processor for {imagePath}");
                            }
                        }
                    }
                }
                catch (ErrorException ex)
                {
                    Logging.LogError($"Azure face error: {ex.Response.Content}");
                }
                catch (Exception ex)
                {
                    Logging.LogError($"Exception during Azure face detection: {ex}");
                }

                watch.Stop();

                if (faces.Any())
                {
                    Logging.Log($"  Azure Detected {faces.Count()} faces in {watch.ElapsedTime}ms");
                }

                _transThrottle.ProcessNewTransactions();
            }
            else
            {
                Logging.LogVerbose("Azure Face Service is not configured.");
            }

            return faces;
        }
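
        // Inferred purely from the calls above (_transThrottle.Disabled, Call and
        // ProcessNewTransactions), the transaction throttle has roughly this shape.
        // This is a sketch, not the real interface, which lives elsewhere; its job
        // is to cap the monthly Azure transaction count, disabling calls once the
        // quota is exhausted.
        public interface ITransThrottle
        {
            // True once the monthly transaction quota has been used up.
            bool Disabled { get; }

            // Wraps an Azure SDK call so that the transaction can be counted.
            Task<T> Call<T>(string desc, Task<T> task);

            // Records any transactions made since the last call.
            void ProcessNewTransactions();
        }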