// GET: MultiFaceDetection
//
// Downloads a fixed list of sample images from AWS S3, runs the Microsoft Face
// API over each one, saves the original plus one cropped JPEG per detected face
// under Server.MapPath(directory), and renders a view model describing every
// face (bounding box, gender, age, smile, glasses).
//
// Returns: the Index view bound to this.finalModal.
// Throws:  rethrows any failure after writing it to the console.
public async Task<ActionResult> Index()
{
    try
    {
        // Step 1. Get images from AWS S3 storage.
        List<string> imagesNames = new List<string>
        {
            "Deepika_5.jpg",
            "deepika-padukone-story_647_112417055722.jpg",
            "Disha_Patani_promotes_M.S._Dhoni_–_The_Untold_Story_(05).jpg",
            "Disha-Patani-1.jpg",
            "elizabeth-olsen-explains-why-singing-badly-on-purpose-for-her-new-movie-was-freeing.jpg",
            "female-superheroes-black-widow.jpg",
            "olsens.jpg",
            "Scarlett_Johansson_in_Kuwait_01b-tweaked.jpg",
            "alexandra_daddario_2015-wide.jpg",
            "Jacqueline-Fernandez.jpg"
        };

        // Step 2. For each image, run the face api detection algorithm.
        var faceServiceClient = new FaceServiceClient(ServiceKey, "https://westcentralus.api.cognitive.microsoft.com/face/v1.0");

        for (int i = 0; i < imagesNames.Count; i++)
        {
            var detectedFaces = new ObservableCollection<vmFace>();
            var resultCollection = new ObservableCollection<vmFace>();

            using (WebClient client = new WebClient())
            {
                // Download each image exactly once and reuse the bytes for the
                // local copy and the Face API call. (Previously the same image
                // was fetched from S3 a second time via OpenRead.)
                byte[] fileBytes = client.DownloadData(string.Concat("https://s3-eu-west-1.amazonaws.com/faceapiimages/", imagesNames[i]));

                // CreateDirectory is a no-op when the directory already exists,
                // so no exists-check (and no swallowed exception) is needed.
                Directory.CreateDirectory(Server.MapPath(directory));

                string imageRelativePath = "../MultiDetectedFiles" + '/' + imagesNames[i];
                string imageFullPath = Server.MapPath(directory) + '/' + imagesNames[i];
                System.IO.File.WriteAllBytes(imageFullPath, fileBytes);

                // A seekable MemoryStream lets us rewind after DetectAsync has
                // consumed it; the old non-seekable response stream could not be
                // re-read for rendering below.
                using (var stream = new MemoryStream(fileBytes))
                {
                    Face[] faces = await faceServiceClient.DetectAsync(stream, true, true,
                        new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Glasses });

                    foreach (var face in faces)
                    {
                        // Create & Save Cropped Images under a unique name.
                        var croppedImg = Convert.ToString(Guid.NewGuid()) + ".jpeg";
                        var croppedImgPath = "../MultiDetectedFiles" + '/' + croppedImg;
                        var croppedImgFullPath = Server.MapPath(directory) + '/' + croppedImg;

                        // Dispose both bitmaps: Bitmap.FromFile keeps the source
                        // file locked until the bitmap is disposed, and the old
                        // code leaked one source bitmap per face.
                        using (var sourceBitmap = (Bitmap)Bitmap.FromFile(imageFullPath))
                        using (Bitmap croppedFace = CropBitmap(
                            sourceBitmap,
                            face.FaceRectangle.Left,
                            face.FaceRectangle.Top,
                            face.FaceRectangle.Width,
                            face.FaceRectangle.Height))
                        {
                            croppedFace.Save(croppedImgFullPath, ImageFormat.Jpeg);
                        }

                        detectedFaces.Add(new vmFace()
                        {
                            ImagePath = imageRelativePath,
                            FileName = imagesNames[i],
                            FilePath = croppedImgPath,
                            Left = face.FaceRectangle.Left,
                            Top = face.FaceRectangle.Top,
                            Width = face.FaceRectangle.Width,
                            Height = face.FaceRectangle.Height,
                            FaceId = face.FaceId.ToString(),
                            Gender = face.FaceAttributes.Gender,
                            Age = string.Format("{0:#} years old", face.FaceAttributes.Age),
                            IsSmiling = face.FaceAttributes.Smile > 0.0 ? "Smile" : "Not Smile",
                            Glasses = face.FaceAttributes.Glasses.ToString(),
                        });
                    }

                    // DetectAsync consumed the stream; rewind before measuring
                    // the image for rendering.
                    stream.Position = 0;

                    // Convert detection result into UI binding object for rendering.
                    var imageInfo = UIHelper.GetImageInfoForRenderingFromStream(stream);
                    var rectFaces = UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo);
                    foreach (var face in rectFaces)
                    {
                        resultCollection.Add(face);
                    }

                    var faceModal = new FaceDetectionModal { DetectedFaces = detectedFaces, ResultCollection = resultCollection };
                    this.finalModal.Items.Add(faceModal);
                }
            }
        }
    }
    catch (Exception ex)
    {
        // Log, then rethrow preserving the original stack trace.
        Console.Write(ex);
        throw;
    }

    return View(this.finalModal);
}
// GET: MultiFaceDetection
//
// Lists every blob in the configured Azure storage container, downloads each
// image, runs the Microsoft Face API over it, saves the original plus one
// cropped JPEG per detected face under Server.MapPath(directory), and
// accumulates the results into the DetectedFaces / ResultCollection members
// for rendering.
//
// Returns: the Index view bound to this.finalModal.
// Throws:  rethrows any failure after writing it to the console.
public async Task<ActionResult> Index()
{
    try
    {
        // Step 1. Get images from blob storage.
        BlobHelper BlobHelper = new BlobHelper(StorageAccount, StorageKey);
        List<string> blobs = BlobHelper.ListBlobs(Container);
        // Snapshot the blob names (copy constructor replaces the old
        // element-by-element foreach copy).
        List<string> images = new List<string>(blobs);

        // Step 2. For each image, run the face api detection algorithm.
        var faceServiceClient = new FaceServiceClient(ServiceKey, "https://westcentralus.api.cognitive.microsoft.com/face/v1.0");

        for (int i = 0; i < blobs.Count; i++)
        {
            using (WebClient client = new WebClient())
            {
                // Download each image exactly once and reuse the bytes for the
                // local copy and the Face API call. (Previously the same blob
                // was fetched a second time via OpenRead.)
                byte[] fileBytes = client.DownloadData(string.Concat("http://faceapiweu.blob.core.windows.net/cloudprojectsampleimages/", images[i]));

                // CreateDirectory is a no-op when the directory already exists,
                // so no exists-check (and no swallowed exception) is needed.
                Directory.CreateDirectory(Server.MapPath(directory));

                string imageFullPath = Server.MapPath(directory) + '/' + images[i];
                System.IO.File.WriteAllBytes(imageFullPath, fileBytes);

                // A seekable MemoryStream lets us rewind after DetectAsync has
                // consumed it; the old non-seekable response stream could not be
                // re-read for rendering below.
                using (var stream = new MemoryStream(fileBytes))
                {
                    Face[] faces = await faceServiceClient.DetectAsync(stream, true, true,
                        new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Glasses });

                    foreach (var face in faces)
                    {
                        // Create & Save Cropped Images under a unique name.
                        var croppedImg = Convert.ToString(Guid.NewGuid()) + ".jpeg";
                        var croppedImgPath = "../MultiDetectedFiles" + '/' + croppedImg;
                        var croppedImgFullPath = Server.MapPath(directory) + '/' + croppedImg;

                        // Dispose both bitmaps: Bitmap.FromFile keeps the source
                        // file locked until the bitmap is disposed, and the old
                        // code leaked one source bitmap per face.
                        using (var sourceBitmap = (Bitmap)Bitmap.FromFile(imageFullPath))
                        using (Bitmap croppedFace = CropBitmap(
                            sourceBitmap,
                            face.FaceRectangle.Left,
                            face.FaceRectangle.Top,
                            face.FaceRectangle.Width,
                            face.FaceRectangle.Height))
                        {
                            croppedFace.Save(croppedImgFullPath, ImageFormat.Jpeg);
                        }

                        DetectedFaces.Add(new vmFace()
                        {
                            ImagePath = null,
                            FileName = croppedImg,
                            FilePath = croppedImgPath,
                            Left = face.FaceRectangle.Left,
                            Top = face.FaceRectangle.Top,
                            Width = face.FaceRectangle.Width,
                            Height = face.FaceRectangle.Height,
                            FaceId = face.FaceId.ToString(),
                            Gender = face.FaceAttributes.Gender,
                            Age = string.Format("{0:#} years old", face.FaceAttributes.Age),
                            IsSmiling = face.FaceAttributes.Smile > 0.0 ? "Smile" : "Not Smile",
                            Glasses = face.FaceAttributes.Glasses.ToString(),
                        });
                    }

                    // DetectAsync consumed the stream; rewind before measuring
                    // the image for rendering.
                    stream.Position = 0;

                    // Convert detection result into UI binding object for rendering.
                    var imageInfo = UIHelper.GetImageInfoForRenderingFromStream(stream);
                    var rectFaces = UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo);
                    foreach (var face in rectFaces)
                    {
                        ResultCollection.Add(face);
                    }

                    var faceModal = new FaceDetectionModal { DetectedFaces = DetectedFaces, ResultCollection = ResultCollection };
                    this.finalModal.Items.Add(faceModal);
                }
            }
        }
    }
    catch (Exception ex)
    {
        // Log, then rethrow preserving the original stack trace.
        Console.Write(ex);
        throw;
    }

    return View(this.finalModal);
}