コード例 #1
0
        /// <summary>
        /// Downloads each image attachment sent by the user, detects faces with the
        /// Face API, identifies them against large person group "1" and posts the
        /// identified person's name back to the conversation.
        /// </summary>
        /// <param name="context">Dialog context used to post replies to the user.</param>
        /// <param name="argument">Awaitable collection of image attachments.</param>
        public async Task PedirImagen(IDialogContext context, IAwaitable <IEnumerable <Attachment> > argument)
        {
            var imagenes = await argument;

            // Create the Face client once per call instead of once per image (the
            // original rebuilt it inside the loop).
            var faceServiceClient = new FaceServiceClient(subscriptionKey, subscriptionEndpoint);

            using (var cliente = new HttpClient())
            {
                foreach (var pic in imagenes)
                {
                    var dato = await cliente.GetByteArrayAsync(pic.ContentUrl);

                    using (Stream stream = new MemoryStream(dato))
                    {
                        var faces = await faceServiceClient.DetectAsync(stream, true, false, null);

                        // IdentifyAsync throws when given an empty face-id array, so
                        // skip attachments in which no face was detected.
                        if (faces.Length == 0)
                        {
                            continue;
                        }

                        var resultadoIdentificacion = await faceServiceClient.IdentifyAsync(
                            faces.Select(ff => ff.FaceId).ToArray(), largePersonGroupId: "1");

                        var res = resultadoIdentificacion[0];
                        if (res.Candidates.Length > 0)
                        {
                            try
                            {
                                // PersonId is already a Guid; the previous
                                // Guid -> string -> Guid round-trip was redundant.
                                var nombrePersona = await faceServiceClient.GetPersonInLargePersonGroupAsync(
                                    "1", res.Candidates[0].PersonId);

                                await context.PostAsync("La persona es: " + nombrePersona.Name);
                            }
                            catch (Exception)
                            {
                                // Best effort: the person record could not be fetched;
                                // tell the user how to enroll instead of crashing the dialog.
                                await context.PostAsync("No se encontró la identidad de la persona. Para darla de alta utilizar el comando: \"alta\"");
                            }
                        }
                    }
                }
            }
        }
コード例 #2
0
        /// <summary>
        /// Captures one preview frame from the camera, detects and identifies faces
        /// with the Face API, and returns the identified person's name.
        /// </summary>
        /// <param name="videoFrame">Ignored on entry: it is immediately replaced by a frame
        /// sized from <paramref name="videoProperties"/>. Kept for interface compatibility —
        /// NOTE(review): consider removing this parameter at the call sites.</param>
        /// <param name="videoProperties">Preview resolution used to size the capture frame.</param>
        /// <param name="mediaCapture">Active capture device to read the preview frame from.</param>
        /// <returns>The identified person's name, or an empty string when nobody was
        /// identified or any step failed (best-effort semantics).</returns>
        public static async Task <string> ObtenerIdentidadAPI(VideoFrame videoFrame, VideoEncodingProperties videoProperties, MediaCapture mediaCapture)
        {
            var personName = "";

            videoFrame = new VideoFrame(InputPixelFormat, (int)videoProperties.Width, (int)videoProperties.Height);

            try
            {
                var valor = await mediaCapture.GetPreviewFrameAsync(videoFrame);

                SoftwareBitmap softwareBitmapPreviewFrame = valor.SoftwareBitmap;

                SoftwareBitmap softwareBitmapCropped = await MainPage.CreateFromBitmap(
                    softwareBitmapPreviewFrame,
                    (uint)softwareBitmapPreviewFrame.PixelWidth,
                    (uint)softwareBitmapPreviewFrame.PixelHeight);

                // The Face API expects a JPEG stream; convert to BGRA8/premultiplied
                // before encoding. (The original also JPEG-encoded the raw preview
                // frame into an unused local — dead work, removed.)
                SoftwareBitmap displayableImage = SoftwareBitmap.Convert(softwareBitmapCropped, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);

                byte[] arrayImage = await EncodedBytesClass.EncodedBytes(displayableImage, BitmapEncoder.JpegEncoderId);

                string subscriptionKey      = localSettings.Values["apiKey"] as string;
                string subscriptionEndpoint = "https://southcentralus.api.cognitive.microsoft.com/face/v1.0";
                var    faceServiceClient    = new FaceServiceClient(subscriptionKey, subscriptionEndpoint);

                using (var nuevoStreamFace = new MemoryStream(arrayImage))
                {
                    try
                    {
                        IEnumerable <FaceAttributeType> faceAttributes = new FaceAttributeType[]
                        {
                            FaceAttributeType.Gender,
                            FaceAttributeType.Age,
                            FaceAttributeType.Smile,
                            FaceAttributeType.Emotion,
                            FaceAttributeType.Glasses,
                            FaceAttributeType.Hair
                        };

                        var faces = await faceServiceClient.DetectAsync(nuevoStreamFace, true, false, faceAttributes);

                        // IdentifyAsync throws when given an empty face-id array.
                        if (faces.Length > 0)
                        {
                            var resultadoIdentificacion = await faceServiceClient.IdentifyAsync(
                                faces.Select(ff => ff.FaceId).ToArray(), largePersonGroupId: App4.MainPage.GroupId);

                            for (int idx = 0; idx < faces.Length; idx++)
                            {
                                var res = resultadoIdentificacion[idx];

                                if (res.Candidates.Length > 0)
                                {
                                    var nombrePersona = await faceServiceClient.GetPersonInLargePersonGroupAsync(
                                        App4.MainPage.GroupId, res.Candidates[0].PersonId);

                                    personName = nombrePersona.Name;
                                }
                            }
                        }
                    }
                    catch (FaceAPIException)
                    {
                        // Best effort: identification failures fall through and return "".
                    }
                }
            }
            catch (Exception)
            {
                // Best effort: capture/encoding failures also return "".
            }
            return personName;
        }
コード例 #3
0
        /// <summary>
        /// Identifies each captured face photo against large person group "1" and appends
        /// one <see cref="SincronizarFichaje.Empleado"/> per photo to <c>empleadosObS</c>
        /// (a default entry is still added when identification fails).
        /// </summary>
        /// <param name="listadoCaras">Captured photos; Item4 holds the bitmap and
        /// Item2 the capture timestamp (per the tuple usage below).</param>
        /// <returns>The shared <c>empleadosObS</c> collection with one entry per photo.</returns>
        public async Task <ObservableCollection <SincronizarFichaje.Empleado> > ObtenerIdentidades(ObservableCollection <FotosCapttuple> listadoCaras)
        {
            // SECURITY(review): subscription key is hard-coded in source. Move it to
            // settings/secret storage (the file already reads localSettings elsewhere).
            string subscriptionKey      = "5ff19b57095a4d10bf64274ed9e6ef30";
            string subscriptionEndpoint = "https://southcentralus.api.cognitive.microsoft.com/face/v1.0";
            var    faceServiceClient    = new FaceServiceClient(subscriptionKey, subscriptionEndpoint);

            foreach (var item in listadoCaras)
            {
                var empleado = new SincronizarFichaje.Empleado();

                SoftwareBitmap softwareBitmap        = item.TupleFotosCapturadas.Item4;
                SoftwareBitmap softwareBitmapCropped = await CreateFromBitmap(softwareBitmap, (uint)softwareBitmap.PixelWidth, (uint)softwareBitmap.PixelHeight);

                SoftwareBitmap displayableImage = SoftwareBitmap.Convert(softwareBitmapCropped, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);

                byte[] arrayByteData = await EncodedBytesClass.EncodedBytes(displayableImage, BitmapEncoder.JpegEncoderId);

                using (var nuevoStreamFace = new MemoryStream(arrayByteData))
                {
                    try
                    {
                        var faces = await faceServiceClient.DetectAsync(nuevoStreamFace, true, false);

                        // IdentifyAsync throws when given an empty face-id array.
                        if (faces.Length > 0)
                        {
                            var resultadoIdentificacion = await faceServiceClient.IdentifyAsync(
                                faces.Select(ff => ff.FaceId).ToArray(), largePersonGroupId: "1");

                            var res = resultadoIdentificacion[0];

                            // BUGFIX: the original read Candidates[0].Confidence BEFORE this
                            // length check, so unmatched faces threw IndexOutOfRangeException
                            // (silently swallowed, leaving an empty Empleado entry).
                            if (res.Candidates.Length > 0)
                            {
                                var nombrePersona = await faceServiceClient.GetPersonInLargePersonGroupAsync("1", res.Candidates[0].PersonId);

                                if (!string.IsNullOrEmpty(nombrePersona.Name))
                                {
                                    empleado.EmpleadoNombe = nombrePersona.Name;
                                }
                                else
                                {
                                    empleado.EmpleadoNombe = "No Identificado";
                                }
                                empleado.Horario = item.TupleFotosCapturadas.Item2.ToString();
                            }
                        }
                    }
                    catch (Exception)
                    {
                        // Best effort: a failed photo still yields a default Empleado entry.
                    }
                }
                empleadosObS.Add(empleado);
            }

            return empleadosObS;
        }
コード例 #4
0
        /// <summary>
        /// Grabs one camera preview frame, detects and identifies faces with the Face API,
        /// reports status/results to the UI labels via the dispatcher, and returns the
        /// last identified person's name ("" when none or on error).
        /// </summary>
        /// <returns>The identified person's name, or an empty string.</returns>
        public async Task <string> ObtenerIdentidad()
        {
            var personName = "";

            try
            {
                const BitmapPixelFormat InputPixelFormat1 = BitmapPixelFormat.Bgra8;

                using (VideoFrame previewFrame = new VideoFrame(InputPixelFormat1, (int)this.videoProperties.Width, (int)this.videoProperties.Height))
                {
                    var valor = await this.mediaCapture.GetPreviewFrameAsync(previewFrame);

                    SoftwareBitmap softwareBitmapPreviewFrame = valor.SoftwareBitmap;

                    SoftwareBitmap softwareBitmapCropped = await CreateFromBitmap(softwareBitmapPreviewFrame, (uint)softwareBitmapPreviewFrame.PixelWidth, (uint)softwareBitmapPreviewFrame.PixelHeight);

                    // The Face API expects a JPEG stream; convert to BGRA8/premultiplied
                    // first. (The original also encoded the raw preview frame into an
                    // unused local — dead work, removed.)
                    SoftwareBitmap displayableImage = SoftwareBitmap.Convert(softwareBitmapCropped, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);

                    byte[] arrayImage = await EncodedBytes(displayableImage, BitmapEncoder.JpegEncoderId);

                    // SECURITY(review): subscription key is hard-coded in source; move it
                    // to settings/secret storage.
                    string subscriptionKey      = "a6fa05b6601b4ea398aa2039d601d983";
                    string subscriptionEndpoint = "https://southcentralus.api.cognitive.microsoft.com/face/v1.0";
                    var    faceServiceClient    = new FaceServiceClient(subscriptionKey, subscriptionEndpoint);

                    using (var nuevoStreamFace = new MemoryStream(arrayImage))
                    {
                        try
                        {
                            IEnumerable <FaceAttributeType> faceAttributes =
                                new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Emotion, FaceAttributeType.Glasses, FaceAttributeType.Hair };

                            var faces = await faceServiceClient.DetectAsync(nuevoStreamFace, true, false, faceAttributes);

                            // All UI updates are marshalled through the dispatcher; this
                            // method does not run on the UI thread. (The original also
                            // rethrew from inside the dispatched lambda, which makes the
                            // exception unobservable — removed.)
                            var ignored2 = this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                            {
                                txtResultServicio.Text = "Caras encontradas: " + faces.Length.ToString();
                            });

                            // IdentifyAsync throws when given an empty face-id array.
                            if (faces.Length > 0)
                            {
                                var resultadoIdentificacion = await faceServiceClient.IdentifyAsync(faces.Select(ff => ff.FaceId).ToArray(), largePersonGroupId : this.GroupId);

                                for (int idx = 0; idx < faces.Length; idx++)
                                {
                                    string genero = faces[idx].FaceAttributes.Gender.ToString();
                                    if (genero != string.Empty)
                                    {
                                        genero = genero == "male" ? "Masculino" : "Femenino";
                                    }

                                    var res = resultadoIdentificacion[idx];

                                    if (res.Candidates.Length > 0)
                                    {
                                        var nombrePersona = await faceServiceClient.GetPersonInLargePersonGroupAsync(GroupId, res.Candidates[0].PersonId);

                                        personName = nombrePersona.Name;

                                        // Copy loop state before capturing it: the dispatched
                                        // lambda may run after the loop has moved on.
                                        var nombreMostrado = nombrePersona.Name;
                                        var generoMostrado = genero;
                                        var ignored3       = this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                                        {
                                            txtResult.Text = nombreMostrado + " / " + generoMostrado;
                                        });
                                    }
                                    else
                                    {
                                        // BUGFIX: the original assigned txtResult.Text directly
                                        // here (off the UI thread) instead of dispatching.
                                        var ignored3 = this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                                        {
                                            txtResult.Text = "Unknown";
                                        });
                                    }
                                }
                            }
                        }
                        catch (Exception ex)
                        {
                            var error    = ex.Message;
                            var ignored3 = this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                            {
                                txtResultServicio.Text = "Error 2: " + error;
                            });
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                var mensaje  = ex.Message;
                var ignored4 = this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                {
                    txtResultServicio.Text = "Error 3: " + mensaje;
                });
            }
            return personName;
        }