// Uploads the image file and calls Detect Faces.
        private async Task <Face[]> UploadAndDetectFaces(string imageFilePath)
        {
            // The list of Face attributes to return.
            IEnumerable <FaceAttributeType> faceAttributes =
                new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Emotion, FaceAttributeType.Glasses, FaceAttributeType.Hair };

            // Call the Face API.
            try
            {
                using (Stream imageFileStream = File.OpenRead(imageFilePath))
                {
                    Face[] faces = await face_service_client.DetectAsync(imageFileStream, returnFaceId : true, returnFaceLandmarks : false, returnFaceAttributes : faceAttributes);

                    return(faces);
                }
            }
            // Catch and display Face API errors.
            catch (FaceAPIException f)
            {
                MessageBox.Show(f.ErrorMessage, f.ErrorCode);
                return(new Face[0]);
            }
            // Catch and display all other errors.
            catch (Exception e)
            {
                MessageBox.Show(e.Message, "Error");
                return(new Face[0]);
            }
        }
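All of the snippets on this page assume a Face client field (face_service_client, faceServiceClient, _faceServiceClient, and so on) has already been constructed. A minimal sketch of that setup and of calling the helper above, assuming the Microsoft.ProjectOxford.Face client library; the subscription key, endpoint, and ShowFaceCountAsync name are placeholders:

        // Sketch only: the key and endpoint are placeholders, and the exact
        // FaceServiceClient constructor overload depends on the library version.
        private readonly IFaceServiceClient face_service_client =
            new FaceServiceClient("<subscription-key>", "https://westus.api.cognitive.microsoft.com/face/v1.0");

        // Hypothetical caller that reports how many faces the helper found.
        private async Task ShowFaceCountAsync(string imageFilePath)
        {
            Face[] faces = await UploadAndDetectFaces(imageFilePath);
            MessageBox.Show("Detected " + faces.Length + " face(s).");
        }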
Example #2
        public bool MakeRequestLocalFile(String file)
        {
            var requiredFaceAttributes = new FaceAttributeType[] {
                FaceAttributeType.Age,
                FaceAttributeType.Gender,
                //FaceAttributeType.Smile,
                //FaceAttributeType.FacialHair,
                //FaceAttributeType.HeadPose
            };

            try
            {
                Thread.Sleep(3 * 1000); // brief pause, presumably to stay under the API rate limit
                using (Stream s = File.OpenRead(file))
                {
                    var faces = new List <Face>(faceServiceClient.DetectAsync(s, true, false, requiredFaceAttributes).Result);
                    return(faces.Any(face => face.FaceAttributes.Gender == "female" &&
                                     face.FaceAttributes.Age < 30));
                }
            }
            catch
            {
                return(false);
            }
        }
        public async Task <string> DetectEmotion(string filePath)
        {
            var emotion = string.Empty;

            try
            {
                using (var imgStream = File.OpenRead(filePath))
                {
                    var faceAttributes = new List <FaceAttributeType>();
                    faceAttributes.Add(FaceAttributeType.Emotion);

                    var face = await fsClient.DetectAsync(imgStream, true, true, faceAttributes);

                    // Take the emotion scores of the first detected face
                    // (null when no face is found; handled by the catch block below).
                    var emotionResult = face.Select(f => new {
                        f.FaceAttributes.Emotion
                    }).ToList().FirstOrDefault();

                    IEnumerable <KeyValuePair <string, float> > emotionRating = new List <KeyValuePair <string, float> >();
                    emotionRating = emotionResult.Emotion.ToRankedList().OrderByDescending(f => f.Value);
                    emotion       = emotionRating.FirstOrDefault().Key;
                }
            }
            catch (Exception e)
            {
                emotion = "error";
            }

            return(emotion);
        }
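A minimal usage sketch for DetectEmotion above; the file path is a placeholder:

        // Sketch only (inside an async method): print the dominant emotion of a local photo.
        string topEmotion = await DetectEmotion(@"C:\images\sample.jpg");
        Console.WriteLine("Dominant emotion: " + topEmotion);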
Example #4
        private async Task <Face[]> DetectFacesFrom(string pictureUrl)
        {
            // The list of Face attributes to return.
            IEnumerable <FaceAttributeType> faceAttributes =
                new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Emotion, FaceAttributeType.Glasses, FaceAttributeType.Hair, FaceAttributeType.FacialHair };

            // Call the Face API.
            try
            {
                Log("Detecting Faces");
                Face[] faces = await _faceServiceClient.DetectAsync(pictureUrl, returnFaceId : true, returnFaceLandmarks : false, returnFaceAttributes : faceAttributes);

                Log("Faces length: " + faces.Length);
                return(faces);
            }
            // Catch and display Face API errors.
            catch (FaceAPIException f)
            {
                //MessageBox.Show(f.ErrorMessage, f.ErrorCode);
                return(new Face[0]);
            }
            // Catch and display all other errors.
            catch (Exception e)
            {
                //MessageBox.Show(e.Message, "Error");
                return(new Face[0]);
            }
        }
        public async Task <Guid> detectPerson(String verifyImagePath, String groupId)
        {
            using (Stream s = File.OpenRead(verifyImagePath))
            {
                var faces = await faceServiceClient.DetectAsync(s);

                var faceIds = faces.Select(face => face.FaceId).ToArray();

                var results = await faceServiceClient.IdentifyAsync(faceIds, groupId);

                foreach (var identifyResult in results)
                {
                    Console.WriteLine("Result of face: {0}", identifyResult.FaceId);
                    if (identifyResult.Candidates.Length == 0)
                    {
                        Console.WriteLine("No one identified");
                    }
                    else
                    {
                        return(identifyResult.Candidates[0].PersonId);
                    }
                }
            }
            return(Guid.Empty);
        }
        public async Task <NamedFace[]> AnalyzeImageUsingHelper(Stream stream)
        {
            Face[] faces = await faceDetector.DetectAsync(stream, false, true, true, false);

            NamedFace[] namedFaces = new NamedFace[faces.Length];

            //Copy to named faces vector.
            for (int i = 0; i < faces.Length; i++)
            {
                namedFaces[i] = new NamedFace(faces[i]);
            }

            // TODO: Is this the right place to get the images from???
            bool identifyFaces;

            bool.TryParse(ConfigurationManager.AppSettings["IdentifySpecificPeople"] ?? "false", out identifyFaces);

            if (identifyFaces && faces.Length > 0)
            {
                var faceIds = faces.Select(face => face.FaceId).ToArray();

                var results = await faceDetector.IdentityAsync("coworkers", faceIds);

                foreach (var identifyResult in results)
                {
                    Console.WriteLine("Result of face: {0}", identifyResult.FaceId);

                    if (identifyResult.Candidates.Length == 0)
                    {
                        Console.WriteLine("No one identified");
                    }
                    else
                    {
                        var candidateId = identifyResult.Candidates[0].PersonId;
                        var person      = await faceDetector.GetPersonAsync("coworkers", candidateId);

                        if (identifyResult.Candidates[0].Confidence > 0.5)
                        {
                            for (int i = 0; i < namedFaces.Length; i++)
                            {
                                if (namedFaces[i].FaceId == identifyResult.FaceId)
                                {
                                    // Set name.
                                    namedFaces[i].Name = person.Name;
                                }
                            }

                            Console.WriteLine("Identified as {0}", person.Name);
                        }
                    }
                }
            }

            return(namedFaces);
        }
        private async Task <Face[]> detectFace(Stream path, String personId)
        {
            using (path)
            {
                var faces = await faceServiceClient.DetectAsync(path);

                return(faces);
            }
        }
Example #8
        private async void btnFacialVerification_Click(object sender, RoutedEventArgs e)
        {
            try
            {
                Guid faceid1;
                Guid faceid2;

                ComboBoxItem cbi1 = (ComboBoxItem)cmbImages1.SelectedItem;
                ComboBoxItem cbi2 = (ComboBoxItem)cmbImages2.SelectedItem;

                // Detect the face in each image - need the FaceId for each
                using (Stream faceimagestream = GetStreamFromUrl(cbi1.Tag.ToString()))
                {
                    var faces = await _faceserviceclient.DetectAsync(faceimagestream, returnFaceId : true);

                    if (faces.Length > 0)
                    {
                        faceid1 = faces[0].FaceId;
                    }
                    else
                    {
                        throw new Exception("No face found in image 1.");
                    }
                }
                using (Stream faceimagestream = GetStreamFromUrl(cbi2.Tag.ToString()))
                {
                    var faces = await _faceserviceclient.DetectAsync(faceimagestream, returnFaceId : true);

                    if (faces.Length > 0)
                    {
                        faceid2 = faces[0].FaceId;
                    }
                    else
                    {
                        throw new Exception("No face found in image 2.");
                    }
                }

                // Verify the faces
                var result = await _faceserviceclient.VerifyAsync(faceid1, faceid2);

                txtInfo.Text  = "Match Confidence: " + result.Confidence.ToString() + Environment.NewLine;
                txtInfo.Text += "Identical Images: " + result.IsIdentical.ToString() + Environment.NewLine;
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
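The click handler above relies on a GetStreamFromUrl helper that is not shown. A plausible sketch, assuming the ComboBox tags hold downloadable image URLs; only the method name comes from the call sites, the body is an assumption:

        // Hypothetical helper: downloads the image at the given URL into a
        // seekable stream that DetectAsync can read.
        private Stream GetStreamFromUrl(string url)
        {
            using (var webClient = new System.Net.WebClient())
            {
                return new MemoryStream(webClient.DownloadData(url));
            }
        }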
Example #9
        // Reads the image as a stream and analyzes it with the DetectAsync method.
        private async Task <Face[]> UploadAndDetectFaces(string imageFilePath)
        {
            try
            {
                using (Stream imageFileStream = File.OpenRead(imageFilePath))
                {
                    var faces = await faceServiceClient.DetectAsync(imageFileStream,
                                                                    true,
                                                                    true,
                                                                    new FaceAttributeType[] {
                        FaceAttributeType.Gender,
                        FaceAttributeType.Age,
                        FaceAttributeType.Emotion,
                        FaceAttributeType.FacialHair,
                        FaceAttributeType.Glasses
                    });

                    return(faces.ToArray());
                }
            }
            catch (Exception ex)
            {
                return(new Face[0]);
            }
        }
Example #10
        public static async Task <string> UploadAndDetectFaces(string imageFilePath)
        {
            try
            {
                var requiredFaceAttributes = new FaceAttributeType[] {
                    FaceAttributeType.Age,
                    FaceAttributeType.Gender,
                    FaceAttributeType.Emotion
                };
                using (WebClient webClient = new WebClient())
                {
                    using (Stream imageFileStream = webClient.OpenRead(imageFilePath))
                    {
                        var faces = await faceServiceClient.DetectAsync(imageFileStream, returnFaceLandmarks : true, returnFaceAttributes : requiredFaceAttributes);

                        var    faceAttributes = faces.Select(face => face.FaceAttributes);
                        string result         = string.Empty;
                        faceAttributes.ToList().ForEach(f =>
                                                        result += $"Age: {f.Age.ToString()} Years  Gender: {f.Gender}  Emotion: {f.Emotion.ToString()}{Environment.NewLine}{Environment.NewLine}"
                                                        );
                        return(result);
                    }
                }
            }
            catch (Exception ex)
            {
                return(string.Empty);
            }
        }
        public async Task <string> Face_Analyse(Stream imageFileStream)
        {
            //Count Faces
            Face[] faces = await faceServiceClient.DetectAsync(imageFileStream, returnFaceId : true, returnFaceLandmarks : false, returnFaceAttributes : faceAttributes);

            if (faces.Count() > 1)
            {
                throw new Exception("Zu viele Gesichter erkannt...");
            }
            if (faces.Count() < 1)
            {
                throw new Exception("Kein Gesicht erkannt...");
            }

            // Read the emotion scores for the detected face
            EmotionScores emotionScores            = faces[0].FaceAttributes.Emotion;
            Dictionary <string, float> emotionList = new Dictionary <string, float>(emotionScores.ToRankedList());

            // Assemble the response
            StringBuilder sb = new StringBuilder();

            sb.Append("<div><table>");
            foreach (KeyValuePair <string, float> element in emotionList)
            {
                sb.Append(String.Format("<tr><td>{0}: " + "</td><td>" + " {1}%" + "</td></tr>", Translate(element.Key), (element.Value * 100).ToString()));
            }
            sb.Append("</table></div>");

            // Return the result
            return(Newtonsoft.Json.JsonConvert.SerializeObject(sb.ToString()));
        }
Example #12
        /// <summary>
        /// Uploads the frame and gets the face detection result.
        /// </summary>
        public async void DetectFaces(SoftwareBitmap bitmap)
        {
            if (bitmap == null || _processingFace)
            {
                return;
            }
            _processingFace = true;
            try
            {
                using (var randomAccessStream = new InMemoryRandomAccessStream())
                {
                    var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, randomAccessStream);

                    encoder.SetSoftwareBitmap(bitmap);
                    await encoder.FlushAsync();

                    randomAccessStream.Seek(0);

                    Face[] detectedfaces = await _faceServiceClient.DetectAsync(randomAccessStream.AsStream(), true, false, new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.FacialHair, FaceAttributeType.Glasses });

                    CheckPersons(detectedfaces, bitmap);
                }
            }
            catch (Exception ex)
            {
                //   Debug.WriteLine("FaceAPIException HttpStatus: " + ex.HttpStatus + ", ErrorCode : " + ex.ErrorCode + ", ErrorMessage: " + ex.ErrorMessage);
                Debug.WriteLine("DetectFaces exception : " + ex.Message);
                ProcessResults(null, null, null);
            }
        }
Example #13
        // Uploads the image and calls Detect Faces.
        private async Task <Face[]> UploadAndDetectFaces(string imageFilePath)
        {   // The list of face attributes to return.
            IEnumerable <FaceAttributeType> faceAttributes = new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Emotion, FaceAttributeType.Glasses, FaceAttributeType.Hair };

            // Call the Face API.
            try
            {
                using (Stream imageFileStream = File.OpenRead(imageFilePath))
                {
                    Face[] faces = await faceServiceClient.DetectAsync(imageFileStream, returnFaceId : true, returnFaceLandmarks : false, returnFaceAttributes : faceAttributes);

                    return(faces);
                }
            }
            // Catch and display Face API errors.
            catch (FaceAPIException f)
            {
                MessageBox.Show(f.ErrorMessage, f.ErrorCode);
                return(new Face[0]);
            }
            // Catch and display all other errors.
            catch (Exception e)
            {
                MessageBox.Show(e.Message, "Errore!");
                return(new Face[0]);
            }
        }
        private async Task <FaceRectangle[]> UploadAndDetectFaces(string imageFilePath)
        {
            try
            {
                Windows.Storage.StorageFile file = await KnownFolders.PicturesLibrary.GetFileAsync(imageFilePath);

                if (file != null)
                {
                    // Open a stream for the selected file.
                    // The 'using' block ensures the stream is disposed
                    // after the image is loaded.
                    using (Stream fileStream =
                               await file.OpenStreamForReadAsync())
                    {
                        var faces = await faceServiceClient.DetectAsync(fileStream);

                        var faceRects = faces.Select(face => face.FaceRectangle);
                        return(faceRects.ToArray());
                    }
                }
                else
                {
                    return(new FaceRectangle[0]);
                }
            }
            catch (Exception ex)
            {
                SolidColorBrush brush = new SolidColorBrush(Windows.UI.Colors.Red);
                Status.Foreground = brush;
                Status.Text       = "Error Loading picture : " + ex.Message;
                return(null);
            }
        }
Example #15
        private async Task IdentifyPersons(string personGroupId, string testImageFile)
        {
            using (Stream s = File.OpenRead(testImageFile))
            {
                var faces = await faceClient.DetectAsync(s);

                var faceIds = faces.Select(face => face.FaceId).ToArray();

                var results = await faceClient.IdentifyAsync(personGroupId, faceIds);

                foreach (var identifyResult in results)
                {
                    Console.WriteLine("Result of face: {0}", identifyResult.FaceId);
                    if (identifyResult.Candidates.Length == 0)
                    {
                        Console.WriteLine("No one identified");
                    }
                    else
                    {
                        // Get top 1 among all candidates returned
                        var candidateId = identifyResult.Candidates[0].PersonId;
                        var person      = await faceClient.GetPersonAsync(personGroupId, candidateId);

                        //var person = await faceClient.PersonGroupPerson.GetAsync(personGroupId, candidateId);
                        Console.WriteLine("Identified as {0}", person.Name);
                    }
                }
            }
        }
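IdentifyAsync only returns candidates once the person group has been populated and trained. A brief sketch of that precondition, assuming the same faceClient instance; the group id is a placeholder:

        // Sketch only: the person group must exist, contain persons with registered
        // faces, and be trained before IdentifyPersons will match anyone.
        await faceClient.TrainPersonGroupAsync("myfriends"); // placeholder group id
        // Poll faceClient.GetPersonGroupTrainingStatusAsync("myfriends") until the
        // training status reports success, then call IdentifyPersons.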
Example #16
        /*
         * Uploads the image to the face detection API.
         * Returns the rectangles of the faces that were detected.
         */
        private async Task <FaceRectangle[]> UploadAndDetectFaces(string imageFilePath, StorageFile storage)
        {
            try
            {
                StorageFile storageFile = storage;

                var randomAccessStream = await storageFile.OpenReadAsync();

                using (Stream imageFileStream = randomAccessStream.AsStreamForRead())
                {
                    var faces = await faceServiceClient.DetectAsync(imageFileStream);

                    var faceRects = faces.Select(face => face.FaceRectangle);
                    //output.Text = "Faces: {0}" + String.Format("{0}", faceRects.ToArray().Length);
                    return(faceRects.ToArray());
                }
            }
            catch (Exception ex)
            {
                output.Text = "Exception: " + String.Format("{0}", ex.Source);
                System.Diagnostics.Debug.WriteLine(ex.Message);
            }

            return(new FaceRectangle[0]); // nothing was detected
        }
Example #17
        // public async Task<IHttpActionResult>
        public async Task <Respuesta> SetFoto([FromBody] FotoRequest value)
        {
            if (value == null)
            {
                return(new Respuesta
                {
                    mensaje = "Error no se obtuvo datos",
                    validacion = false
                });
            }
            IEnumerable <FaceAttributeType> faceAttributes =
                new FaceAttributeType[] { FaceAttributeType.Gender,
                                          FaceAttributeType.Age, FaceAttributeType.Smile,
                                          FaceAttributeType.Emotion, FaceAttributeType.Glasses,
                                          FaceAttributeType.Hair };

            // Call the Face API.
            using (Stream imageFileStream = new MemoryStream(value.Array))
            {
                faces = await faceServiceClient.DetectAsync(imageFileStream, returnFaceId : true, returnFaceLandmarks : false, returnFaceAttributes : faceAttributes);
            }
            return(new Respuesta
            {
                mensaje = faces[0].FaceAttributes.Age.ToString(),
                validacion = true
            });
        }
Example #18
        // Uploads the image file and calls Detect Faces.

        private async Task <Face[]> UploadAndDetectFaces(string filename)
        {
            // The list of Face attributes to return.
            var faceAttributes = new FaceAttributeType[] { };

            // Call the Face API.
            try
            {
                var file     = Path.Combine(_environment.WebRootPath, "uploads", filename);
                var fileData = System.IO.File.ReadAllBytes(file);

                using (Stream ms = new MemoryStream(fileData))
                {
                    return(await faceServiceClient.DetectAsync(ms, returnFaceId : true));
                }
            }
            // Catch and display Face API errors.
            catch (FaceAPIException f)
            {
                errors.Add($"{filename}, {f.ErrorCode}: {f.ErrorMessage}");
            }
            // Catch and display all other errors.
            catch (Exception e)
            {
                errors.Add($"{filename}, Error: {e.Message}");
            }

            return(null);
        }
Example #19
        private async Task <Face[]> UploadAndDetectFaces(string imageFilePath)
        {
            // The list of Face attributes to return.
            IEnumerable <FaceAttributeType> faceAttributes =
                new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Emotion, FaceAttributeType.Glasses, FaceAttributeType.FacialHair, FaceAttributeType.Hair };

            // Call the Face API.
            try
            {
                Face[] faces = null;
                // Retry until at least one face is detected (note: this loops indefinitely if the image contains no faces).
                while (faces == null || faces.Length == 0)
                {
                    using (Stream imageFileStream = File.OpenRead(imageFilePath))
                        faces = await faceServiceClient.DetectAsync(imageFileStream, returnFaceId : true, returnFaceLandmarks : false, returnFaceAttributes : faceAttributes);
                }
                return(faces);
            }

            catch (FaceAPIException f)
            {
                return(new Face[0]);
            }
            // Catch and display all other errors.
            catch (Exception e)
            {
                var msg = e.Message;
                //MessageBox.Show(e.Message, "Error");
                return(new Face[0]);
            }
        }
Example #20
        private async Task <Face[]> UploadAndDetectFaces(string imageFilePath)
        {
            try
            {
                using (Stream imageFileStream = File.OpenRead(imageFilePath))
                {
                    _(String.Format("Extracting faces from {0}...", imageFilePath));
                    var faces = await faceServiceClient.DetectAsync(imageFileStream,
                                                                    true,
                                                                    true,
                                                                    new FaceAttributeType[] {
                        FaceAttributeType.Gender,
                        FaceAttributeType.Age,
                        FaceAttributeType.Emotion
                    });

                    _(String.Format("{0} faces found!", faces.Length));
                    return(faces.ToArray());
                }
            }
            catch (FaceAPIException ex)
            {
                //MessageBox.Show(ex.Message);
                _(ex.ErrorMessage);
                return(new Face[0]);
            }
        }
Example #21
        // Uploads the image and calls Detect Faces.

        private async Task <Face[]> UploadAndDetectFaces(string imageFilePath)
        {
            // The list of face attributes to return.
            IEnumerable <FaceAttributeType> faceAttributes =
                new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Emotion, FaceAttributeType.Glasses, FaceAttributeType.Hair };

            // Call the Face API.
            try
            {
                using (Stream imageFileStream = File.OpenRead(imageFilePath))
                {
                    Face[] faces = await faceServiceClient.DetectAsync(imageFileStream, returnFaceId : true, returnFaceLandmarks : false, returnFaceAttributes : faceAttributes);

                    return(faces);
                }
            }
            //API error handling
            catch (FaceAPIException f)
            {
                MessageBox.Show(f.ErrorMessage, f.ErrorCode);
                return(new Face[0]);
            }
            // General error handling
            catch (Exception e)
            {
                MessageBox.Show(e.Message, "Error");
                return(new Face[0]);
            }
        }
Example #22
        private async Task <FaceRectangle[]> UploadAndDetectFaces(string imageFilePath)
        {
            List <FaceAttributeType> FaceAG = new List <FaceAttributeType>();

            FaceAG.Add(FaceAttributeType.Age);
            FaceAG.Add(FaceAttributeType.Gender); // the attributes we want to retrieve

            try
            {
                using (Stream imageFileStream = File.OpenRead(imageFilePath))
                {
                    var faces = await m_faceServiceClient.DetectAsync(imageFileStream, true, false, FaceAG); // send the photo for analysis

                    var faceRects            = faces.Select(face => face.FaceRectangle);                     // get the detected face rectangles
                    var faceA                = faces.Select(face => face.FaceAttributes);                    // get the attributes: gender and age
                    FaceAttributes[] faceAtr = faceA.ToArray();
                    foreach (var faceAttr in faceAtr)
                    {
                        Console.WriteLine(@"Age: {0}", faceAttr.Age);
                        Console.WriteLine(@"Gender: {0}", faceAttr.Gender);
                        Console.WriteLine();
                    }

                    return(faceRects.ToArray());
                }
            }
            catch (Exception)
            {
                return(new FaceRectangle[0]);
            }
        }
Example #23
        public static async Task <IList <string> > GetPeopleAsync(Stream stream)
        {
            var result = new List <string>();
            var faces  = await faceServiceClient.DetectAsync(stream);

            var faceIds = faces.Select(face => face.FaceId).ToArray();

            if (faceIds.Length != 0)
            {
                var results = await faceServiceClient.IdentifyAsync(personGroupId, faceIds);

                foreach (var identifyResult in results)
                {
                    if (identifyResult.Candidates.Length != 0 && identifyResult.Candidates[0].Confidence > confidentThresold)
                    {
                        // Get top 1 among all candidates returned
                        var candidateId = identifyResult.Candidates[0].PersonId;
                        var person      = await faceServiceClient.GetPersonAsync(personGroupId, candidateId);

                        result.Add(person.Name);
                    }
                }
            }
            return(result);
        }
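A minimal usage sketch for GetPeopleAsync above; the photo path is a placeholder, and personGroupId / confidentThresold are assumed to be fields defined elsewhere in the class:

        // Sketch only (inside an async method): list the people recognized in a local photo.
        using (Stream photo = File.OpenRead(@"C:\photos\team.jpg"))
        {
            IList<string> names = await GetPeopleAsync(photo);
            Console.WriteLine(names.Count == 0 ? "No one recognized" : string.Join(", ", names));
        }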
        public async Task <List <Face> > DetectFaces(string photoUrl)
        {
            IEnumerable <FaceAttributeType> faceAttributes =
                new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age };
            List <Face> faces = new List <Face>();

            try
            {
                Thread.Sleep(3500); // brief pause, presumably to stay under the API rate limit
                var detectionResult = await faceServiceClient.DetectAsync(photoUrl, returnFaceLandmarks : false, returnFaceAttributes : faceAttributes);

                faces.AddRange(detectionResult.ToList());
                return(faces);
            }

            catch (FaceAPIException f)
            {
                return(faces);
            }
            // Catch and display all other errors.
            catch (Exception e)
            {
                return(faces);
            }
        }
Example #25
        public static async Task IdentifyFace(byte[] image, TraceWriter log)
        {
            try
            {
                using (Stream stream = new MemoryStream(image))
                {
                    var faces = await faceServiceClient.DetectAsync(stream);

                    if (faces.Length == 1)
                    {
                        log.Info("ONE FACE FOUND");

                        //Identify faces + Check Gear
                        await Task.WhenAll(CheckAuthorized(faces, log), IdentifyGear(image, log));
                    }
                    else if (faces.Length > 1)
                    {
                        log.Info("TOO MANY FACES");
                        throw new TooManyFacesException();
                    }
                    else
                    {
                        log.Info("COULD NOT FIND FACE");
                        throw new NoFaceException();
                    }
                }
            }
            catch (FaceAPIException e)
            {
                log.Info(e.ToString());
            }
        }
Example #26
        private async Task <Face[]> DetectFaces(Stream imageStream)
        {
            var attributes = new List <FaceAttributeType>();

            attributes.Add(FaceAttributeType.Age);
            attributes.Add(FaceAttributeType.Gender);
            attributes.Add(FaceAttributeType.Smile);
            attributes.Add(FaceAttributeType.Glasses);
            attributes.Add(FaceAttributeType.FacialHair);
            Face[] faces = null;
            try
            {
                faces = await _faceServiceClient.DetectAsync(imageStream, true, true, attributes);
            }
            catch (FaceAPIException exception)
            {
                AppendMessage("------------------------");
                AppendMessage($"Face API Error = {exception.ErrorMessage}");
            }
            catch (Exception exception)
            {
                AppendMessage("------------------------");
                AppendMessage($"Face API Error = {exception.Message}");
            }
            return(faces);
        }
Example #27
        // Uploads the image file and calls Detect Faces.

        public async Task <Face[]> UploadAndDetectFaces(Bitmap image)
        {
            // The list of Face attributes to return.
            IEnumerable <FaceAttributeType> faceAttributes =
                new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Emotion, FaceAttributeType.Glasses, FaceAttributeType.Hair, FaceAttributeType.FacialHair };

            // Call the Face API.
            try
            {
                using (Stream imageFileStream = new MemoryStream())
                {
                    image.Save(imageFileStream, System.Drawing.Imaging.ImageFormat.Bmp);
                    imageFileStream.Seek(0, SeekOrigin.Begin);
                    Face[] faces = await faceServiceClient.DetectAsync(imageFileStream, returnFaceId : true, returnFaceLandmarks : false, returnFaceAttributes : faceAttributes);

                    return(faces);
                }
            }
            // Catch and display Face API errors.
            catch (FaceAPIException f)
            {
                MessageBox.Show(f.ErrorMessage, f.ErrorCode);
                return(new Face[0]);
            }
            // Catch and display all other errors.
            catch (Exception e)
            {
                MessageBox.Show(e.Message, "Error");
                return(new Face[0]);
            }
        }
Example #28
        private async Task <Face[]> GetDetectedFaces(string imageFilePath)
        {
            // Call the Face API.
            try
            {
                using (Stream imageFileStream = File.OpenRead(imageFilePath))
                {
                    _faces = await _faceServiceClient.DetectAsync(imageFileStream, returnFaceId : true, returnFaceLandmarks : false, returnFaceAttributes : _faceAttributes);

                    InitializeFaceDescriptionsStringArray();
                    return(_faces);
                }
            }
            // Catch and display Face API errors.
            catch (FaceAPIException f)
            {
                var errorMessage = f.ErrorMessage;
                var errorCode    = f.ErrorCode;

                return(new Face[0]);
            }
            // Catch and display all other errors.
            catch (Exception e)
            {
                var message = e.Message;
                return(new Face[0]);
            }
        }
Example #29
        async void UploadAndDetectFaces(string imageFilePath)
        {
            try
            {
                StorageFolder appInstalledFolder = Windows.ApplicationModel.Package.Current.InstalledLocation;
                StorageFolder assets             = await appInstalledFolder.GetFolderAsync("Assets");

                var storageFile = await assets.GetFileAsync(imageFilePath);

                var randomAccessStream = await storageFile.OpenReadAsync();

                using (Stream stream = randomAccessStream.AsStreamForRead())
                {
                    // This is the fragment where the faces are detected:
                    var faces = await faceServiceClient.DetectAsync(stream);

                    var faceRects = faces.Select(face => face.FaceRectangle);
                    _faceRectangles = faceRects.ToArray();
                    // Forces a redraw on the canvas control
                    CustomCanvas.Invalidate();
                }
            }
            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine(ex.Message);
            }
        }
Example #30
        public async Task FaceDetect(string image)
        {
            // Call the Face API.
            try
            {
                using (Stream imageFileStream = File.OpenRead(image))
                {
                    var faces = await _faceServiceClient.DetectAsync(imageFileStream,
                                                                     returnFaceId : true,
                                                                     returnFaceLandmarks : false,
                                                                     returnFaceAttributes : null);


                    //Get First Face in List
                    if (faces.Length > 0)
                    {
                        faceId2 = faces[0].FaceId;
                    }
                }
            }
            // Catch and display Face API errors.
            catch (FaceAPIException f)
            {
                await DisplayAlert("Error", f.ErrorMessage, "ok");
            }
            // Catch and display all other errors.
            catch (Exception e)
            {
                await DisplayAlert("Error", e.Message, "ok");
            }
        }
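FaceDetect above only stores faceId2 for a later comparison. A sketch of that verification step, assuming a faceId1 captured the same way from another image:

        // Sketch only: compare the two face ids collected by FaceDetect.
        var verifyResult = await _faceServiceClient.VerifyAsync(faceId1, faceId2);
        await DisplayAlert("Verify",
            "Identical: " + verifyResult.IsIdentical + ", confidence: " + verifyResult.Confidence,
            "ok");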