public async Task <CompareFacesResponse> CompareAsync(MemoryStream srcImg, MemoryStream trgtImg)
        {
            try
            {
                var sourceImg = new Image()
                {
                    Bytes = srcImg
                };
                var targetImg = new Image()
                {
                    Bytes = trgtImg
                };

                var request = new CompareFacesRequest()
                {
                    SimilarityThreshold = similarityThreshold,
                    SourceImage         = sourceImg,
                    TargetImage         = targetImg
                };

                return(await rekognitionClient.CompareFacesAsync(request));
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }

            return(null);
        }
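For reference, a minimal caller for this helper might look like the sketch below. The comparer instance and file names are hypothetical; the snippet assumes the enclosing class supplies rekognitionClient and similarityThreshold as fields.

        // Usage sketch (hypothetical caller and file names):
        var src = new MemoryStream(File.ReadAllBytes("source.jpg"));
        var trg = new MemoryStream(File.ReadAllBytes("target.jpg"));
        var response = await comparer.CompareAsync(src, trg);
        if (response != null)
        {
            foreach (var match in response.FaceMatches)
            {
                Console.WriteLine($"Match with {match.Similarity}% similarity.");
            }
        }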
Example 2
        internal CompareFacesResponse CompareFaces(CompareFacesRequest request)
        {
            var marshaller   = new CompareFacesRequestMarshaller();
            var unmarshaller = CompareFacesResponseUnmarshaller.Instance;

            return(Invoke <CompareFacesRequest, CompareFacesResponse>(request, marshaller, unmarshaller));
        }
Example 3
        // snippet-start:[Rekognition.dotnetv3.CompareFacesExample]
        public static async Task Main()
        {
            float  similarityThreshold = 70F;
            string sourceImage         = "source.jpg";
            string targetImage         = "target.jpg";

            var rekognitionClient = new AmazonRekognitionClient();

            Amazon.Rekognition.Model.Image imageSource = new Amazon.Rekognition.Model.Image();

            try
            {
                using FileStream fs = new FileStream(sourceImage, FileMode.Open, FileAccess.Read);
                byte[] data = new byte[fs.Length];
                fs.Read(data, 0, (int)fs.Length);
                imageSource.Bytes = new MemoryStream(data);
            }
            catch (Exception)
            {
                Console.WriteLine($"Failed to load source image: {sourceImage}");
                return;
            }

            Amazon.Rekognition.Model.Image imageTarget = new Amazon.Rekognition.Model.Image();

            try
            {
                using FileStream fs = new FileStream(targetImage, FileMode.Open, FileAccess.Read);
                byte[] data = new byte[fs.Length];
                fs.Read(data, 0, (int)fs.Length);
                imageTarget.Bytes = new MemoryStream(data);
            }
            catch (Exception ex)
            {
                Console.WriteLine($"Failed to load target image: {targetImage}");
                Console.WriteLine(ex.Message);
                return;
            }

            var compareFacesRequest = new CompareFacesRequest
            {
                SourceImage         = imageSource,
                TargetImage         = imageTarget,
                SimilarityThreshold = similarityThreshold,
            };

            // Call operation
            var compareFacesResponse = await rekognitionClient.CompareFacesAsync(compareFacesRequest);

            // Display results
            compareFacesResponse.FaceMatches.ForEach(match =>
            {
                ComparedFace face    = match.Face;
                BoundingBox position = face.BoundingBox;
                Console.WriteLine($"Face at {position.Left} {position.Top} matches with {match.Similarity}% confidence.");
            });

            Console.WriteLine($"Found {compareFacesResponse.UnmatchedFaces.Count} face(s) that did not match.");
        }
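        // snippet-end:[Rekognition.dotnetv3.CompareFacesExample]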
Example 4
        /// <summary>
        /// Initiates the asynchronous execution of the CompareFaces operation.
        /// </summary>
        ///
        /// <param name="request">Container for the necessary parameters to execute the CompareFaces operation.</param>
        /// <param name="cancellationToken">
        ///     A cancellation token that can be used by other objects or threads to receive notice of cancellation.
        /// </param>
        /// <returns>The task object representing the asynchronous operation.</returns>
        public Task <CompareFacesResponse> CompareFacesAsync(CompareFacesRequest request, System.Threading.CancellationToken cancellationToken = default(CancellationToken))
        {
            var marshaller   = new CompareFacesRequestMarshaller();
            var unmarshaller = CompareFacesResponseUnmarshaller.Instance;

            return(InvokeAsync <CompareFacesRequest, CompareFacesResponse>(request, marshaller,
                                                                           unmarshaller, cancellationToken));
        }
Example 5
        public async Task <bool> AuthenticateUserByFace(byte[] targetImage) //FileStream targetImage
        {
            float  similarityThreshold = 90F;
            String sourceImage         = "https://hackathonimagedump.s3.us-east-2.amazonaws.com/123456.jpeg";
            // String targetImage = "https://hackathonimagedump.s3.us-east-2.amazonaws.com/HappyFace.jpeg";


            AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient("AKIAX2ZTBCX4OX6XH77Q", "X/FcCoEFyuIl5+hmwE+IVMk4t1089mgf0jIQI7Xo", RegionEndpoint.USWest2);

            Amazon.Rekognition.Model.Image imageSource = new Amazon.Rekognition.Model.Image();
            try
            {
                var    webClient  = new WebClient();
                byte[] imageBytes = webClient.DownloadData(sourceImage);
                imageSource.Bytes = new MemoryStream(imageBytes);
            }
            catch
            {
                // Ignored: a failed download leaves imageSource.Bytes null, and the
                // CompareFaces call below will fail validation.
            }

            Amazon.Rekognition.Model.Image imageTarget = new Amazon.Rekognition.Model.Image();
            try
            {
                imageTarget.Bytes = new MemoryStream(targetImage);
            }
            catch
            {
                // Ignored: a failure here leaves imageTarget.Bytes null.
            }

            CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
            {
                SourceImage         = imageSource,
                TargetImage         = imageTarget,
                SimilarityThreshold = similarityThreshold
            };

            // Call operation
            CompareFacesResponse compareFacesResponse = await rekognitionClient.CompareFacesAsync(compareFacesRequest);

            // NOTE: SourceImageFace.Confidence is the detection confidence of the face
            // found in the source image, not the similarity of the match; inspect
            // FaceMatches to confirm the two faces actually match.
            return compareFacesResponse.HttpStatusCode == HttpStatusCode.OK &&
                   compareFacesResponse.SourceImageFace.Confidence > 90F;
        }
Example 6
        public async Task <float> FindFaceOrThrowException(string s3ObjectIdCard, string s3ObjectPhotoToTest, float similarityThreshold, string personName)
        {
            var imgSrc = GetImageDefinition(s3ObjectIdCard);
            var imgTrg = GetImageDefinition(s3ObjectPhotoToTest);

            var compareFacesRequest = new CompareFacesRequest()
            {
                SourceImage         = imgSrc,
                TargetImage         = imgTrg,
                SimilarityThreshold = similarityThreshold
            };

            try
            {
                var compareFacesResponse = await _client.CompareFacesAsync(compareFacesRequest);

                // we are looking for two faces, the card and the actual picture taken by the person.
                var matchesWeCareAbout = compareFacesResponse.FaceMatches.OrderByDescending(m => m.Similarity).Take(2);

                if (matchesWeCareAbout.Count() < 2)
                {
                    throw new Exception(string.Format("You don't look like {0}.\nPlease ensure that both your card and your face can be seen.", personName));
                }

                // the card should be the smallest photo of the two with highest similarity
                var shouldBeTheCardBySize = compareFacesResponse.FaceMatches.OrderBy(m => m.Face.BoundingBox.Height * m.Face.BoundingBox.Width).Take(1).Single();

                // the card similarity should be very high

                if (shouldBeTheCardBySize.Similarity <= 95)
                {
                    throw new Exception("Is that your id card?");
                }

                var shouldBeTheFaceOfThePerson = compareFacesResponse.FaceMatches.OrderBy(m => m.Similarity).Take(1).Single();

                return(shouldBeTheFaceOfThePerson.Similarity);

                /*var match = shouldBeTheCardBySimilarity;
                 * var face = match.Face;
                 * var position = face.BoundingBox;
                 * System.Diagnostics.Debug.WriteLine("Face at " + position.Left + " " + position.Top + " matches with " + face.Confidence + "% confidence.");
                 * System.Diagnostics.Debug.WriteLine("There was " + compareFacesResponse.UnmatchedFaces.Count + " face(s) that did not match");
                 * System.Diagnostics.Debug.WriteLine("Source image rotation: " + compareFacesResponse.SourceImageOrientationCorrection);
                 * System.Diagnostics.Debug.WriteLine("Target image rotation: " + compareFacesResponse.TargetImageOrientationCorrection);*/
            }
            catch (Exception e)
            {
                System.Diagnostics.Debug.WriteLine(e.Message);
                throw;
            }
        }
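This method depends on a GetImageDefinition helper that is not shown. A minimal sketch, assuming the keys refer to objects in a known S3 bucket (the bucket name is a placeholder):

        private Amazon.Rekognition.Model.Image GetImageDefinition(string s3Key)
        {
            return new Amazon.Rekognition.Model.Image
            {
                S3Object = new Amazon.Rekognition.Model.S3Object
                {
                    Bucket = "my-verification-bucket", // placeholder, not from the original snippet
                    Name   = s3Key
                }
            };
        }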
Example 7
        public async Task <bool> IsSimillarFaceOnSourceImage(HubConnection hubConnection)
        {
            AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient("AKIA3GGXKFL7TG4ARQ5F", "iXqBARX00AiblMAAfvOIp6tKdwRrd/bQlvTicUcq", Amazon.RegionEndpoint.EUWest1);

            Image imageSource = new Image();

            using (FileStream fs = new FileStream(sourceImage, FileMode.Open, FileAccess.Read))
            {
                byte[] data = new byte[fs.Length];
                fs.Read(data, 0, (int)fs.Length);
                imageSource.Bytes = new MemoryStream(data);
            }

            Image imageTarget = new Image();

            using (FileStream fs = new FileStream(targetImage, FileMode.Open, FileAccess.Read))
            {
                byte[] data = new byte[fs.Length];
                fs.Read(data, 0, (int)fs.Length);
                imageTarget.Bytes = new MemoryStream(data);
            }
            CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
            {
                SourceImage         = imageSource,
                TargetImage         = imageTarget,
                SimilarityThreshold = similarityThreshold
            };

            try
            {
                CompareFacesResponse compareFacesResponse = await rekognitionClient.CompareFacesAsync(compareFacesRequest);

                foreach (CompareFacesMatch match in compareFacesResponse.FaceMatches)
                {
                    ComparedFace face     = match.Face;
                    BoundingBox  position = face.BoundingBox;
                    Console.WriteLine("Face at " + position.Left
                                      + " " + position.Top
                                      + " matches with " + match.Similarity
                                      + "% confidence.");
                    await hubConnection.SendAsync("HideWindows");
                }

                Console.WriteLine("There was " + compareFacesResponse.UnmatchedFaces.Count + " face(s) that did not match");
                return(true);
            }
            catch (Exception)
            {
                return(false);
            }
        }
Example 8
        private static async Task <CompareFacesResponse> Compare(Image image1, Image image2)
        {
            AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(Amazon.RegionEndpoint.USWest2);
            float similarityThreshold = 0F; // set to 0 to see all probability scores

            CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
            {
                SourceImage         = image1,
                TargetImage         = image2,
                SimilarityThreshold = similarityThreshold
            };

            return(await rekognitionClient.CompareFacesAsync(compareFacesRequest));
        }
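A hypothetical caller for this helper; the file names are placeholders. With the threshold set to 0, every candidate face pairing is returned along with its similarity score.

        var img1 = new Image { Bytes = new MemoryStream(File.ReadAllBytes("face1.jpg")) };
        var img2 = new Image { Bytes = new MemoryStream(File.ReadAllBytes("face2.jpg")) };
        var result = await Compare(img1, img2);
        foreach (var match in result.FaceMatches)
        {
            Console.WriteLine($"Candidate match: {match.Similarity}%");
        }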
Example 9
        private void btn_find_faces_Click(object sender, EventArgs e)
        {
            var source = ToBytesStream($"{sourceLocation}");
            var target = ToBytesStream($"{targetLocation}");

            var client  = new AmazonRekognitionClient();
            var request = new CompareFacesRequest {
                SourceImage = source, TargetImage = target, SimilarityThreshold = 90
            };

            var response = client.CompareFaces(request);

            txt_result.Text = $"Found {response.FaceMatches.Count} matched faces with threshold 90%";
        }
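The ToBytesStream helper is not shown. A minimal sketch, assuming it loads a local file into a Rekognition Image:

        // Hypothetical helper: wraps a file's bytes in a Rekognition Image.
        private static Amazon.Rekognition.Model.Image ToBytesStream(string path)
        {
            return new Amazon.Rekognition.Model.Image
            {
                Bytes = new MemoryStream(File.ReadAllBytes(path))
            };
        }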
Example 10
        public async Task <bool> AuthenticateUserByFace(byte[] targetImage)
        {
            try
            {
                float           similarityThreshold = 90F;
                CustomerDetails custInfo            = JsonConvert.DeserializeObject <CustomerDetails>(TempData["UserInfo"].ToString());
                string          sourceImage         = custInfo.body.CustomerDetails.ImageUrl;

                AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(Environment.GetEnvironmentVariable("ACCESS_KEY_ID"), Environment.GetEnvironmentVariable("SECRET_ACCESS_KEY"), RegionEndpoint.USWest2);

                Image  imageSource = new Image();
                var    webClient   = new WebClient();
                byte[] imageBytes  = webClient.DownloadData(sourceImage);
                imageSource.Bytes = new MemoryStream(imageBytes);

                Image imageTarget = new Image();
                imageTarget.Bytes = new MemoryStream(targetImage);
                CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
                {
                    SourceImage         = imageSource,
                    TargetImage         = imageTarget,
                    SimilarityThreshold = similarityThreshold
                };

                // Call operation
                CompareFacesResponse compareFacesResponse = await rekognitionClient.CompareFacesAsync(compareFacesRequest);

                if (compareFacesResponse.HttpStatusCode == HttpStatusCode.OK)
                {
                    // Exactly one face match means the captured face matched the stored photo.
                    return(compareFacesResponse.FaceMatches.Count == 1);
                }

                return(false);
            }
            catch
            {
                // Rethrow the original exception; wrapping it in a bare new Exception()
                // would discard the error details.
                throw;
            }
        }
Example 11
        private string CompareFaces(string strPersonName, MemoryStream msCapture, MemoryStream msFacePic)
        {
            AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient("", "", Amazon.RegionEndpoint.USEast1);


            CompareFacesRequest req = new CompareFacesRequest();

            Amazon.Rekognition.Model.Image src = new Amazon.Rekognition.Model.Image();
            src.Bytes       = msCapture;
            req.SourceImage = src;


            Amazon.Rekognition.Model.Image trg = new Amazon.Rekognition.Model.Image();
            trg.Bytes = msFacePic;

            req.TargetImage = trg;
            try
            {
                CompareFacesResponse     compareFacesResult = rekognitionClient.CompareFaces(req);
                List <CompareFacesMatch> faceDetails        = compareFacesResult.FaceMatches;


                ComparedFace face = null;
                foreach (CompareFacesMatch match in faceDetails)
                {
                    face = match.Face;
                    BoundingBox position = face.BoundingBox;
                    System.Diagnostics.Debug.Write("Face at " + position.Left
                                                   + " " + position.Top
                                                   + " matches with " + face.Confidence
                                                   + "% confidence.");
                    if (face.Confidence > 75)
                    {
                        return(strPersonName);
                    }
                }
            }
            catch (Exception)
            {
                return("Fail");
            }


            return("Unknown");
        }
Example 12
        public CompareFacesResponse CompareFaces(byte[] source, byte[] target)
        {
            AmazonRekognitionClient rekoClient = new AmazonRekognitionClient(_credentials, Amazon.RegionEndpoint.USWest2);

            CompareFacesRequest cfr = new CompareFacesRequest();

            Amazon.Rekognition.Model.Image sourceImage = new Amazon.Rekognition.Model.Image();
            Amazon.Rekognition.Model.Image targetImage = new Amazon.Rekognition.Model.Image();

            var sourceStream = new MemoryStream(source);
            var targetStream = new MemoryStream(target);

            sourceImage.Bytes = sourceStream;
            targetImage.Bytes = targetStream;

            cfr.SourceImage = sourceImage;
            cfr.TargetImage = targetImage;

            return(rekoClient.CompareFaces(cfr));
        }
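A short usage sketch for this overload (service instance and file names are hypothetical):

        byte[] source = File.ReadAllBytes("source.jpg");
        byte[] target = File.ReadAllBytes("target.jpg");
        var response = rekognitionService.CompareFaces(source, target);
        Console.WriteLine($"{response.FaceMatches.Count} match(es) above the default threshold.");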
Example 13
        public async Task <FaceMatchResponse> CompareFacesAsync(string sourceImage, string targetImage)
        {
            // Convert the source image into a MemoryStream-backed Image object
            var imageSource = new Amazon.Rekognition.Model.Image();

            imageSource.Bytes = _serviceUtils.ConvertImageToMemoryStream(sourceImage);

            // Convert the target image into a MemoryStream-backed Image object
            var imageTarget = new Amazon.Rekognition.Model.Image();

            imageTarget.Bytes = _serviceUtils.ConvertImageToMemoryStream(targetImage);

            // Configure the request object sent to AWS Rekognition.
            // SimilarityThreshold sets the minimum similarity for a comparison to count as a match.
            var request = new CompareFacesRequest
            {
                SourceImage         = imageSource,
                TargetImage         = imageTarget,
                SimilarityThreshold = 80f
            };

            // Call the CompareFaces service
            var response = await _rekognitionClient.CompareFacesAsync(request);

            // Check whether any face matched
            var hasMatch = response.FaceMatches.Any();

            // If there was no match, return a not-found result
            if (!hasMatch)
            {
                return(new FaceMatchResponse(hasMatch, null, string.Empty));
            }

            // Use the source image and the match data to outline the face found in the image
            var fileName = _serviceUtils.Drawing(imageSource.Bytes, response.SourceImageFace);
            // Get the similarity percentage of the matched face
            var similarity = response.FaceMatches.FirstOrDefault().Similarity;

            // Return the match details along with the file name/URL for reviewing the image
            return(new FaceMatchResponse(hasMatch, similarity, fileName));
        }
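The FaceMatchResponse type is not shown. A plausible shape, inferred from the constructor calls above (property names are assumptions):

        public class FaceMatchResponse
        {
            public bool   HasMatch   { get; }
            public float? Similarity { get; }
            public string FileName   { get; }

            public FaceMatchResponse(bool hasMatch, float? similarity, string fileName)
            {
                HasMatch   = hasMatch;
                Similarity = similarity;
                FileName   = fileName;
            }
        }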
Example 14
        public Solicitudes GetTestAsync(List <Tab_ConfigSys> Tab_ConfigSys, Solicitudes sol)
        {
            string xClase   = string.Format("{0}|{1}", MethodBase.GetCurrentMethod().Module.Name, MethodBase.GetCurrentMethod().DeclaringType.Name);
            string xProceso = MethodBase.GetCurrentMethod().Name;

            var dto_excepcion = new UTL_TRA_EXCEPCION
            {
                STR_CLASE      = xClase,
                STR_EVENTO     = xProceso,
                STR_PARAMETROS = JsonConvert.SerializeObject(sol),
                STR_APLICATIVO = ConfigurationManager.AppSettings["APLICATIVO"].ToString(),
                STR_SERVIDOR   = System.Net.Dns.GetHostName(),
                FEC_CREACION   = DateTime.Now
            };
            Solicitudes _Solicitudes = new Solicitudes();

            var options = new CredentialProfileOptions
            {
                AccessKey = Tab_ConfigSys[0].llave_Config1,
                SecretKey = Tab_ConfigSys[0].llave_Config2
            };

            try
            {
                var profile = new Amazon.Runtime.CredentialManagement.CredentialProfile("AWSProfileName", options);
                profile.Region = RegionEndpoint.USWest1;
                var netSDKFile = new NetSDKCredentialsFile();
                netSDKFile.RegisterProfile(profile);

                float similarityThreshold = 70F;
                //String sourceImage = sol.arrImageSelfie;
                //String targetImage = sol.UrlFotoCedula;

                //using (AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(Tab_ConfigSys[0].llave_Config1, Tab_ConfigSys[0].llave_Config2, RegionEndpoint.USWest1))
                using (AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(Tab_ConfigSys[0].llave_Config1, Tab_ConfigSys[0].llave_Config2))
                {
                    Amazon.Rekognition.Model.Image imageSource = new Amazon.Rekognition.Model.Image();

                    //using (FileStream fs = new FileStream(new MemoryStream(bytes), FileMode.Open, FileAccess.Read))
                    //{
                    // byte[] data = new byte[fs.Length];
                    //  fs.Read(data, 0, (int)fs.Length);
                    imageSource.Bytes = new MemoryStream(sol.arrImageSelfie);
                    // }


                    Amazon.Rekognition.Model.Image imageTarget = new Amazon.Rekognition.Model.Image();

                    // using (FileStream fs = new FileStream(targetImage, FileMode.Open, FileAccess.Read))
                    //{
                    //  byte[] data = new byte[fs.Length];
                    //  data = new byte[fs.Length];
                    //  fs.Read(data, 0, (int)fs.Length);
                    imageTarget.Bytes = new MemoryStream(sol.arrImageCedulaFrontal);
                    // }


                    CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
                    {
                        SourceImage         = imageSource,
                        TargetImage         = imageTarget,
                        SimilarityThreshold = similarityThreshold
                    };

                    // Call operation
                    CompareFacesResponse compareFacesResponse = rekognitionClient.CompareFaces(compareFacesRequest);

                    // Display results
                    //foreach (CompareFacesMatch match in compareFacesResponse.FaceMatches)
                    compareFacesResponse.FaceMatches.ForEach(match =>
                    {
                        ComparedFace face = match.Face;

                        BoundingBox position = face.BoundingBox;

                        _Solicitudes.PorcentMatched = face.Confidence;
                        _Solicitudes.PositionLeft   = position.Left;
                        _Solicitudes.PositionTop    = position.Top;
                    });

                    _Solicitudes.IdTipoIdentificacion = sol.IdTipoIdentificacion;
                    _Solicitudes.Identificacion       = sol.Identificacion;

                    if (_Solicitudes.PorcentMatched == 0 || _Solicitudes.PorcentMatched == null)
                    {
                        // Guard against an empty UnmatchedFaces list before indexing into it.
                        _Solicitudes.UnMatchedFace = compareFacesResponse.UnmatchedFaces.Count > 0
                            ? compareFacesResponse.UnmatchedFaces[0].Confidence
                            : 0;
                    }
                    else
                    {
                        _Solicitudes.UnMatchedFace = 0;
                    }
                    _Solicitudes.ImageRotationSource = compareFacesResponse.SourceImageOrientationCorrection;
                    _Solicitudes.ImageRotationTarget = compareFacesResponse.TargetImageOrientationCorrection;
                }
                return(_Solicitudes);
            }
            catch (Exception ex)
            {
                dto_excepcion.STR_MENSAJE = ex.Message;
                dto_excepcion.IS_TELEGRAM = true;
                TwoFunTwoMe_DataAccess.Utility.guardaExcepcion(dto_excepcion, ConfigurationManager.ConnectionStrings["TwoFunTwoMeConnection"].ConnectionString);
                _Solicitudes.Mensaje = "ERR_imageTarget";
                throw;
            }
        }
Example 15
        private static async Task Main(string[] args)
        {
            const string AWS_ACCESS_KEY_ID     = "AWS_ACCESS_KEY_ID";
            const string AWS_SECRET_ACCESS_KEY = "AWS_SECRET_ACCESS_KEY";

            Console.WriteLine("Hello World!");

            var self = await File.ReadAllBytesAsync("assets\\self.jpg");

            var front = await File.ReadAllBytesAsync("assets\\front.png");

            var back = await File.ReadAllBytesAsync("assets\\back.png");

            var command = new AnalizeDocumentCommand {
                Self = self, Back = back, Front = front
            };

            var region = RegionEndpoint.USEast1;
            var client = new AmazonRekognitionClient(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, region);

            #region Check whether it is a document
            using (var stream = new MemoryStream(command.Back))
            {
                var request = new DetectLabelsRequest {
                    Image = new Image {
                        Bytes = stream
                    }
                };

                var response = await client.DetectLabelsAsync(request);

                var labels = response.Labels;

                foreach (var label in labels)
                {
                    var accuracy = Accuracy.GetAccuracy(label.Confidence);

                    if (DocumentTypes.IsValidDocument(label.Name))
                    {
                        if (accuracy.IsLow)
                        {
                            Console.WriteLine("Not a document");
                        }
                        if (accuracy.IsMedium)
                        {
                            Console.WriteLine("Might be a document");
                        }
                        if (accuracy.IsHigh)
                        {
                            Console.WriteLine("Very likely a document");
                        }

                        break;
                    }
                }
            }
            #endregion

            #region Compare with the selfie
            using (var source = new MemoryStream(command.Self))
                using (var target = new MemoryStream(command.Front))
                {
                    var request = new CompareFacesRequest {
                        SourceImage = new Image {
                            Bytes = source
                        }, TargetImage = new Image {
                            Bytes = target
                        }
                    };

                    var response = await client.CompareFacesAsync(request);

                    var faces = response.FaceMatches;

                    if (faces.Count != 1)
                    {
                        Console.WriteLine("Inconsistent result");
                    }

                    // Guard against an empty match list before calling First().
                    if (faces.Count > 0)
                    {
                        var accuracy = Accuracy.GetAccuracy(faces.First().Similarity);

                        if (accuracy.IsLow)
                        {
                            Console.WriteLine("This document is not from the same person");
                        }
                        if (accuracy.IsMedium)
                        {
                            Console.WriteLine("This document might be from the same person");
                        }
                        if (accuracy.IsHigh)
                        {
                            Console.WriteLine("This document is very likely from the same person");
                        }
                    }
                }
            #endregion

            #region Verify the document belongs to the valid holder
            using (var stream = new MemoryStream(command.Back))
            {
                var request = new DetectTextRequest {
                    Image = new Image {
                        Bytes = stream
                    }
                };

                var response = await client.DetectTextAsync(request);

                var texts = response.TextDetections;

                foreach (var text in texts)
                {
                    var accuracy = Accuracy.GetAccuracy(text.Confidence);

                    if ("CPF".Equals(text.DetectedText, StringComparison.InvariantCultureIgnoreCase))
                    {
                        if (accuracy.IsLow)
                        {
                            Console.WriteLine("Does not contain a CPF number");
                        }
                        if (accuracy.IsMedium)
                        {
                            Console.WriteLine("Might contain a CPF number");
                        }
                        if (accuracy.IsHigh)
                        {
                            Console.WriteLine("Very likely contains a CPF number");
                        }

                        break;
                    }
                }
            }
            #endregion

            Console.WriteLine("That's all folks!");
        }
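The Accuracy helper used throughout this example is not shown. A minimal sketch consistent with the IsLow/IsMedium/IsHigh checks above; the numeric thresholds are assumptions:

        public class Accuracy
        {
            public bool IsLow    { get; private set; }
            public bool IsMedium { get; private set; }
            public bool IsHigh   { get; private set; }

            public static Accuracy GetAccuracy(float confidence) => new Accuracy
            {
                IsLow    = confidence < 50F,                      // assumed threshold
                IsMedium = confidence >= 50F && confidence < 90F, // assumed threshold
                IsHigh   = confidence >= 90F
            };
        }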
Example 16
        static void Main(string[] args)
        {
            const float MIN_CONFIDENCE = 90F;

            try
            {
                string[] imagesList = GetListOfImages();
                if (imagesList == null || imagesList.Length == 0)
                {
                    Console.WriteLine("No images found in the Images folder");
                    return;
                }

                // Constructs a SharedCredentialsFile object from the default credentials file.
                SharedCredentialsFile sharedCredentialsFile = new SharedCredentialsFile();

                // Get the [default] profile from the credentials file.
                CredentialProfile defaultProfile = GetDefaultProfile(sharedCredentialsFile);

                if (defaultProfile != null)
                {
                    // Get the credentials (access key, secret access key, etc.)
                    AWSCredentials credentials = AWSCredentialsFactory.GetAWSCredentials(defaultProfile, new SharedCredentialsFile());

                    AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(credentials, RegionEndpoint.USEast1);

                    CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
                    {
                        SourceImage = GetImage(@"C:\Temp\TomCruise1.jpg"),
                        TargetImage = GetImage(@"C:\Temp\TomCruise2.jpg")
                    };

                    CompareFacesResponse     response = rekognitionClient.CompareFaces(compareFacesRequest);
                    List <CompareFacesMatch> list     = response.FaceMatches;

                    foreach (string filePath in imagesList)
                    {
                        Image image = GetImage(filePath);
                        if (image == null)
                        {
                            continue;
                        }

                        DetectLabelsRequest detectLabelsRequest = new DetectLabelsRequest()
                        {
                            Image         = image,
                            MinConfidence = MIN_CONFIDENCE,
                        };

                        DetectLabelsResponse detectLabelsResponse = rekognitionClient.DetectLabels(detectLabelsRequest);

                        Console.WriteLine("Image: {0}\n", filePath);
                        foreach (Label label in detectLabelsResponse.Labels)
                        {
                            Console.WriteLine("\t{0} ({1})", label.Name, label.Confidence);
                        }

                        Console.WriteLine();
                    }
                }
                else
                {
                    Console.WriteLine("AWS [default] profile not found");
                }
            }
            catch (AmazonRekognitionException ex)
            {
                Console.WriteLine("AWS Rekognition ERROR: {0}", ex.Message);
            }
            catch (Exception ex)
            {
                Console.WriteLine("ERROR: {0}", ex.Message);
            }

            Console.WriteLine("\nDONE");
            Console.ReadLine();
        }
Example 17
    public static void Example()
    {
        float  similarityThreshold = 70F;
        String sourceImage         = "source.jpg";
        String targetImage         = "target.jpg";

        AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient();

        Amazon.Rekognition.Model.Image imageSource = new Amazon.Rekognition.Model.Image();
        try
        {
            using (FileStream fs = new FileStream(sourceImage, FileMode.Open, FileAccess.Read))
            {
                byte[] data = new byte[fs.Length];
                fs.Read(data, 0, (int)fs.Length);
                imageSource.Bytes = new MemoryStream(data);
            }
        }
        catch (Exception)
        {
            Console.WriteLine("Failed to load source image: " + sourceImage);
            return;
        }

        Amazon.Rekognition.Model.Image imageTarget = new Amazon.Rekognition.Model.Image();
        try
        {
            using (FileStream fs = new FileStream(targetImage, FileMode.Open, FileAccess.Read))
            {
                byte[] data = new byte[fs.Length];
                fs.Read(data, 0, (int)fs.Length);
                imageTarget.Bytes = new MemoryStream(data);
            }
        }
        catch (Exception)
        {
            Console.WriteLine("Failed to load target image: " + targetImage);
            return;
        }

        CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
        {
            SourceImage         = imageSource,
            TargetImage         = imageTarget,
            SimilarityThreshold = similarityThreshold
        };

        // Call operation
        CompareFacesResponse compareFacesResponse = rekognitionClient.CompareFaces(compareFacesRequest);

        // Display results
        foreach (CompareFacesMatch match in compareFacesResponse.FaceMatches)
        {
            ComparedFace face     = match.Face;
            BoundingBox  position = face.BoundingBox;
            Console.WriteLine("Face at " + position.Left
                              + " " + position.Top
                              + " matches with " + face.Confidence
                              + "% confidence.");
        }

        Console.WriteLine("There was " + compareFacesResponse.UnmatchedFaces.Count + " face(s) that did not match");
        Console.WriteLine("Source image rotation: " + compareFacesResponse.SourceImageOrientationCorrection);
        Console.WriteLine("Target image rotation: " + compareFacesResponse.TargetImageOrientationCorrection);
    }
Example 18
        public static async Task <string> FunctionHandler(String photo)
        {
            String bucket = "moodanalysis";
            //ArrayList result = new ArrayList();
            string result = "";

            AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient();

            // Recognizes User's face
            CompareFacesRequest CFR = new CompareFacesRequest()
            {
                //SimilarityThreshold = 50,

                SourceImage = new Image()
                {
                    S3Object = new S3Object()
                    {
                        Name   = "referencePhoto.jpg",
                        Bucket = bucket
                    },
                },

                TargetImage = new Image()
                {
                    S3Object = new S3Object()
                    {
                        Name   = photo,
                        Bucket = bucket
                    },
                },
            };

            CompareFacesResponse compareFacesResponse = await rekognitionClient.CompareFacesAsync(CFR);

            string howManyFaces = "";

            if (compareFacesResponse.FaceMatches.Count == 0)
            {
                return("");
            }

            //int index = 0, bestIndex = 0;
            var         bestMatch       = compareFacesResponse.FaceMatches[0];
            float       bestMatchResult = compareFacesResponse.FaceMatches[0].Similarity;
            BoundingBox bestBoundingBox = compareFacesResponse.FaceMatches[0].Face.BoundingBox;

            foreach (var faceMatch in compareFacesResponse.FaceMatches)
            {
                howManyFaces += faceMatch.Similarity + ",";

                if (bestMatchResult < faceMatch.Similarity)
                {
                    bestMatch       = faceMatch;
                    bestBoundingBox = faceMatch.Face.BoundingBox;
                    bestMatchResult = faceMatch.Similarity;
                }
            }

            // Detects emotions of faces in photo
            DetectFacesRequest detectFacesRequest = new DetectFacesRequest()
            {
                Image = new Image()
                {
                    S3Object = new S3Object()
                    {
                        Name   = photo,
                        Bucket = bucket
                    },
                },

                Attributes = new List <String>()
                {
                    "ALL"
                }
            };

            DetectFacesResponse detectFacesResponse = await rekognitionClient.DetectFacesAsync(detectFacesRequest);

            //int i = 0;
            foreach (FaceDetail face in detectFacesResponse.FaceDetails)
            {
                if (face.BoundingBox.Height == bestBoundingBox.Height &&
                    face.BoundingBox.Left == bestBoundingBox.Left &&
                    face.BoundingBox.Top == bestBoundingBox.Top &&
                    face.BoundingBox.Width == bestBoundingBox.Width)
                {
                    //var emotQuery = FilterEmotions(face, IsLowConfidence);

                    FilterEmotions filter = delegate(FaceDetail faceFilter, ConfidenceFilterDelegate confFilter)
                    {
                        return(faceFilter.Emotions.FindAll(n => confFilter(n)).ToList());
                    };

                    var emotQuery = filter(face, IsLowConfidence);

                    //IEnumerable<Emotion> emotQuery =
                    //    from faceEmotion in face.Emotions
                    //    where faceEmotion.Confidence > 10
                    //    select faceEmotion;

                    // GRAB THE EMOTION
                    foreach (Emotion emot in emotQuery)
                    {
                        result += emot.Type + ",";
                    }

                    break;
                }
            }

            //delete the last ,
            if (result.Length != 0)
            {
                result = result.Substring(0, result.Length - 1);
            }

            return(result);
        }
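This handler references a FilterEmotions delegate type, a ConfidenceFilterDelegate, and an IsLowConfidence method that are not shown. Plausible declarations matching the call sites; the 10% threshold mirrors the commented-out LINQ query:

        // Hypothetical declarations, not part of the original snippet.
        public delegate bool ConfidenceFilterDelegate(Emotion emotion);
        public delegate List<Emotion> FilterEmotions(FaceDetail face, ConfidenceFilterDelegate confFilter);

        private static bool IsLowConfidence(Emotion emotion) => emotion.Confidence > 10;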
Example 19
        private void button1_Click(object sender, EventArgs e)
        {
            float  similarityThreshold = 70F;
            String sourceImage         = "source.jpg";
            String targetImage         = "target.jpg";
            String accessKeyID         = "";
            String secretKey           = "";

            AWSCredentials credentials;

            credentials = new BasicAWSCredentials(accessKeyID.Trim(), secretKey.Trim());

            AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(credentials, Amazon.RegionEndpoint.USEast1);

            Amazon.Rekognition.Model.Image imageSource = new Amazon.Rekognition.Model.Image();
            try
            {
                using (FileStream fs = new FileStream(sourceImage, FileMode.Open, FileAccess.Read))
                {
                    byte[] data = new byte[fs.Length];
                    fs.Read(data, 0, (int)fs.Length);
                    imageSource.Bytes = new MemoryStream(data);
                }
            }
            catch (Exception)
            {
                //Console.WriteLine("Failed to load source image: " + sourceImage);
                listBox1.Items.Add("Failed to load source image: " + sourceImage);
                return;
            }

            Amazon.Rekognition.Model.Image imageTarget = new Amazon.Rekognition.Model.Image();
            try
            {
                using (FileStream fs = new FileStream(targetImage, FileMode.Open, FileAccess.Read))
                {
                    byte[] data = new byte[fs.Length];
                    fs.Read(data, 0, (int)fs.Length);
                    imageTarget.Bytes = new MemoryStream(data);
                }
            }
            catch (Exception)
            {
                //Console.WriteLine("Failed to load target image: " + targetImage);
                listBox1.Items.Add("Failed to load target image: " + targetImage);
                return;
            }

            CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
            {
                SourceImage         = imageSource,
                TargetImage         = imageTarget,
                SimilarityThreshold = similarityThreshold
            };

            // Call operation
            CompareFacesResponse compareFacesResponse = rekognitionClient.CompareFaces(compareFacesRequest);


            // Display results
            foreach (CompareFacesMatch match in compareFacesResponse.FaceMatches)
            {
                ComparedFace face     = match.Face;
                BoundingBox  position = face.BoundingBox;
                //Console.WriteLine("Face at " + position.Left
                //      + " " + position.Top
                //      + " matches with " + match.Similarity
                //      + "% confidence.");
                listBox1.Items.Add("Face at " + position.Left
                                   + " " + position.Top
                                   + " matches with " + match.Similarity
                                   + "% confidence.");
            }

            //Console.WriteLine("There was " + compareFacesResponse.UnmatchedFaces.Count + " face(s) that did not match");
            listBox1.Items.Add("There was " + compareFacesResponse.UnmatchedFaces.Count + " face(s) that did not match");
        }
Example 20
        // Face detection method
        private async Task FacialRecognitionScan(ApplicationUser user, UsersInGymDetail currentFacilityDetail)
        {
            // initialize similarity threshold for accepting face match, source and target img.
            // S3 bucket img, dynamically selected based on user currently logged in.
            float  similarityThreshold = 70F;
            string photo       = $"{user.FirstName}_{user.Id}.jpg";
            String targetImage = $"{user.FirstName}_{user.Id}_Target.jpg";

            try
            {
                // create image objects
                Image imageSource = new Image()
                {
                    S3Object = new S3Object()
                    {
                        Name   = photo,
                        Bucket = bucket
                    },
                };
                Image imageTarget = new Image()
                {
                    S3Object = new S3Object()
                    {
                        Name   = targetImage,
                        Bucket = bucket
                    },
                };
                // create a compare face request object
                CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
                {
                    SourceImage         = imageSource,
                    TargetImage         = imageTarget,
                    SimilarityThreshold = similarityThreshold
                };

                // detect face features of img scanned
                CompareFacesResponse compareFacesResponse = await AmazonRekognition.CompareFacesAsync(compareFacesRequest);

                // Display results
                foreach (CompareFacesMatch match in compareFacesResponse.FaceMatches)
                {
                    ComparedFace face = match.Face;
                    // if confidence for similarity is over 90 then grant access
                    if (match.Similarity > 90)
                    {
                        // if there is a match set scan success
                        user.IsCameraScanSuccessful = true;
                    }
                    else
                    {
                        ViewBag.MatchResult = "Facial Match Failed!";
                    }
                }
            }
            catch (Exception e)
            {
                _logger.LogInformation(e.Message);
            }

            // now add get facial details to display in the view.
            DetectFacesRequest detectFacesRequest = new DetectFacesRequest()
            {
                Image = new Image()
                {
                    S3Object = new S3Object()
                    {
                        Name   = targetImage,
                        Bucket = bucket
                    },
                },
                // "DEFAULT": BoundingBox, Confidence, Landmarks, Pose, and Quality.
                Attributes = new List <String>()
                {
                    "ALL"
                }
            };

            try
            {
                DetectFacesResponse detectFacesResponse = await AmazonRekognition.DetectFacesAsync(detectFacesRequest);

                bool hasAll = detectFacesRequest.Attributes.Contains("ALL");
                foreach (FaceDetail face in detectFacesResponse.FaceDetails)
                {
                    // if the face found has all attributes within a Detect Face object then save these values to the database.
                    if (hasAll)
                    {
                        currentFacilityDetail.IsSmiling    = face.Smile.Value;
                        currentFacilityDetail.Gender       = face.Gender.Value.ToString();
                        currentFacilityDetail.AgeRangeLow  = face.AgeRange.Low;
                        currentFacilityDetail.AgeRangeHigh = face.AgeRange.High;
                    }
                }
            }
            catch (Exception e)
            {
                _logger.LogInformation(e.Message);
            }
        }
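This action relies on members declared elsewhere in the controller. Assumed declarations (names mirror the call sites; the bucket value is a placeholder):

        private const string bucket = "my-gym-bucket";          // placeholder S3 bucket name
        private readonly IAmazonRekognition AmazonRekognition;  // injected Rekognition client
        private readonly ILogger _logger;                       // injected logger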