Example #1
        /// <summary>
        /// Unmarshalls the response from the service to the response class.
        /// </summary>
        /// <param name="context"></param>
        /// <returns></returns>
        public override AmazonWebServiceResponse Unmarshall(JsonUnmarshallerContext context)
        {
            CompareFacesResponse response = new CompareFacesResponse();

            context.Read();
            int targetDepth = context.CurrentDepth;

            while (context.ReadAtDepth(targetDepth))
            {
                if (context.TestExpression("FaceMatches", targetDepth))
                {
                    var unmarshaller = new ListUnmarshaller <CompareFacesMatch, CompareFacesMatchUnmarshaller>(CompareFacesMatchUnmarshaller.Instance);
                    response.FaceMatches = unmarshaller.Unmarshall(context);
                    continue;
                }
                if (context.TestExpression("SourceImageFace", targetDepth))
                {
                    var unmarshaller = ComparedSourceImageFaceUnmarshaller.Instance;
                    response.SourceImageFace = unmarshaller.Unmarshall(context);
                    continue;
                }
            }

            return(response);
        }
Example #2
        public async Task <bool> AuthenticateUserByFace(byte[] targetImage)
        {
            float  similarityThreshold = 90F;
            String sourceImage         = "https://hackathonimagedump.s3.us-east-2.amazonaws.com/123456.jpeg";

            // NOTE: credentials redacted; never hard-code AWS keys in source. Load them
            // from configuration or the default credential chain instead.
            AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient("YOUR_ACCESS_KEY_ID", "YOUR_SECRET_ACCESS_KEY", RegionEndpoint.USWest2);

            Amazon.Rekognition.Model.Image imageSource = new Amazon.Rekognition.Model.Image();
            try
            {
                var    webClient  = new WebClient();
                byte[] imageBytes = webClient.DownloadData(sourceImage);
                imageSource.Bytes = new MemoryStream(imageBytes);
            }
            catch
            {
                // Swallowing the download failure leaves imageSource.Bytes null,
                // which will make the CompareFaces call below throw.
            }

            Amazon.Rekognition.Model.Image imageTarget = new Amazon.Rekognition.Model.Image();
            try
            {
                imageTarget.Bytes = new MemoryStream(targetImage);
            }
            catch
            {
                // A failure here likewise leaves imageTarget.Bytes null.
            }

            CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
            {
                SourceImage         = imageSource,
                TargetImage         = imageTarget,
                SimilarityThreshold = similarityThreshold
            };

            // Call operation
            CompareFacesResponse compareFacesResponse = await rekognitionClient.CompareFacesAsync(compareFacesRequest);

            if (compareFacesResponse.HttpStatusCode == HttpStatusCode.OK)
            {
                // NOTE: SourceImageFace.Confidence is only the detection confidence for the
                // face found in the source image; it is not a match score. See the
                // FaceMatches-based sketch after this example.
                return(compareFacesResponse.SourceImageFace.Confidence > 90F);
            }
            else
            {
                return(false);
            }
        }
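A minimal sketch (not part of the original example) of an authentication check based on the actual match similarity rather than the source-face detection confidence; the helper name and 90F threshold are illustrative:

        private static bool IsFaceMatch(CompareFacesResponse response, float minSimilarity = 90F)
        {
            // FaceMatches only contains faces at or above the request's SimilarityThreshold,
            // so an empty list means no acceptable match was found.
            return response.HttpStatusCode == HttpStatusCode.OK &&
                   response.FaceMatches.Exists(m => m.Similarity >= minSimilarity);
        }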
Example #3
        public async Task <bool> IsSimillarFaceOnSourceImage(HubConnection hubConnection)
        {
            // NOTE: credentials redacted; never hard-code AWS keys in source.
            AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient("YOUR_ACCESS_KEY_ID", "YOUR_SECRET_ACCESS_KEY", Amazon.RegionEndpoint.EUWest1);

            Image imageSource = new Image();

            using (FileStream fs = new FileStream(sourceImage, FileMode.Open, FileAccess.Read))
            {
                byte[] data = new byte[fs.Length];
                fs.Read(data, 0, (int)fs.Length);
                imageSource.Bytes = new MemoryStream(data);
            }

            Image imageTarget = new Image();

            using (FileStream fs = new FileStream(targetImage, FileMode.Open, FileAccess.Read))
            {
                byte[] data = new byte[fs.Length];
                fs.Read(data, 0, (int)fs.Length);
                imageTarget.Bytes = new MemoryStream(data);
            }
            CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
            {
                SourceImage         = imageSource,
                TargetImage         = imageTarget,
                SimilarityThreshold = similarityThreshold
            };

            try
            {
                CompareFacesResponse compareFacesResponse = await rekognitionClient.CompareFacesAsync(compareFacesRequest);

                foreach (CompareFacesMatch match in compareFacesResponse.FaceMatches)
                {
                    ComparedFace face     = match.Face;
                    BoundingBox  position = face.BoundingBox;
                    Console.WriteLine("Face at " + position.Left
                                      + " " + position.Top
                                      + " matches with " + match.Similarity
                                      + "% confidence.");
                    await hubConnection.SendAsync("HideWindows");
                }

                Console.WriteLine("There was " + compareFacesResponse.UnmatchedFaces.Count + " face(s) that did not match");
                return(true);
            }
            catch (Exception)
            {
                return(false);
            }
        }
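A side note on the file loading above (an editor's sketch, not from the original source): a single FileStream.Read call is not guaranteed to fill the whole buffer, and File.ReadAllBytes avoids that pitfall while shortening the code:

            Image imageSource = new Image
            {
                // File.ReadAllBytes always returns the complete file contents.
                Bytes = new MemoryStream(File.ReadAllBytes(sourceImage))
            };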
Example #4
        public async Task <bool> AuthenticateUserByFace(byte[] targetImage)
        {
            try
            {
                float           similarityThreshold = 90F;
                CustomerDetails custInfo            = JsonConvert.DeserializeObject <CustomerDetails>(TempData["UserInfo"].ToString());
                string          sourceImage         = custInfo.body.CustomerDetails.ImageUrl;

                AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(Environment.GetEnvironmentVariable("ACCESS_KEY_ID"), Environment.GetEnvironmentVariable("SECRET_ACCESS_KEY"), RegionEndpoint.USWest2);

                Image  imageSource = new Image();
                var    webClient   = new WebClient();
                byte[] imageBytes  = webClient.DownloadData(sourceImage);
                imageSource.Bytes = new MemoryStream(imageBytes);

                Image imageTarget = new Image();
                imageTarget.Bytes = new MemoryStream(targetImage);
                CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
                {
                    SourceImage         = imageSource,
                    TargetImage         = imageTarget,
                    SimilarityThreshold = similarityThreshold
                };

                // Call operation
                CompareFacesResponse compareFacesResponse = await rekognitionClient.CompareFacesAsync(compareFacesRequest);

                if (compareFacesResponse.HttpStatusCode == HttpStatusCode.OK)
                {
                    // Exactly one face match means the customer's face is present and unambiguous.
                    return(compareFacesResponse.FaceMatches.Count == 1);
                }
                else
                {
                    return(false);
                }
            }
            catch
            {
                // Rethrow, preserving the original exception and its stack trace.
                throw;
            }
        }
Example #5
        // Note: unlike the other examples, this follows the Alibaba Cloud (aliyun-net-sdk)
        // unmarshaller pattern of reading flattened keys from an UnmarshallerContext.
        public static CompareFacesResponse Unmarshall(UnmarshallerContext context)
        {
            CompareFacesResponse compareFacesResponse = new CompareFacesResponse();

            compareFacesResponse.HttpResponse = context.HttpResponse;
            compareFacesResponse.RequestId    = context.StringValue("CompareFaces.RequestId");
            compareFacesResponse.Success      = context.BooleanValue("CompareFaces.Success");
            compareFacesResponse.Code         = context.StringValue("CompareFaces.Code");
            compareFacesResponse.Message      = context.StringValue("CompareFaces.Message");

            CompareFacesResponse.CompareFaces_Data data = new CompareFacesResponse.CompareFaces_Data();
            data.SimilarityScore      = context.FloatValue("CompareFaces.Data.SimilarityScore");
            data.ConfidenceThresholds = context.StringValue("CompareFaces.Data.ConfidenceThresholds");
            compareFacesResponse.Data = data;

            return(compareFacesResponse);
        }
Example #6
        private string CompareFaces(string strPersonName, MemoryStream msCapture, MemoryStream msFacePic)
        {
            AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient("", "", Amazon.RegionEndpoint.USEast1);


            CompareFacesRequest req = new CompareFacesRequest();

            Amazon.Rekognition.Model.Image src = new Amazon.Rekognition.Model.Image();
            src.Bytes       = msCapture;
            req.SourceImage = src;


            Amazon.Rekognition.Model.Image trg = new Amazon.Rekognition.Model.Image();
            trg.Bytes = msFacePic;

            req.TargetImage = trg;
            try
            {
                CompareFacesResponse     compareFacesResult = rekognitionClient.CompareFaces(req);
                List <CompareFacesMatch> faceDetails        = compareFacesResult.FaceMatches;


                ComparedFace face = null;
                foreach (CompareFacesMatch match in faceDetails)
                {
                    face = match.Face;
                    BoundingBox position = face.BoundingBox;
                    System.Diagnostics.Debug.Write("Face at " + position.Left
                                                   + " " + position.Top
                                                   + " matches with " + face.Confidence
                                                   + "% confidence.");
                    // NOTE: Confidence is the face-detection confidence; match.Similarity is
                    // the usual measure of how closely the two faces match.
                    if (face.Confidence > 75)
                    {
                        return(strPersonName);
                    }
                }
            }
            catch (Exception)
            {
                return("Fail");
            }


            return("Unknown");
        }
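This request never sets SimilarityThreshold, so FaceMatches is filtered by the service default (80%, per the Rekognition documentation). Making the cut-off explicit is one line (illustrative value):

            req.SimilarityThreshold = 75F;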
Example #7
        static void Main(string[] args)
        {
            const float MIN_CONFIDENCE = 90F;

            try
            {
                string[] imagesList = GetListOfImages();
                if (imagesList == null || imagesList.Length == 0)
                {
                    Console.WriteLine("No images found in the Images folder");
                    return;
                }

                // Constructs a SharedCredentialsFile object from the default credentials file.
                SharedCredentialsFile sharedCredentialsFile = new SharedCredentialsFile();

                // Get the [default] profile from the credentials file.
                CredentialProfile defaultProfile = GetDefaultProfile(sharedCredentialsFile);

                if (defaultProfile != null)
                {
                    // Get the credentials (access key, secret access key, etc.)
                    AWSCredentials credentials = AWSCredentialsFactory.GetAWSCredentials(defaultProfile, new SharedCredentialsFile());

                    AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(credentials, RegionEndpoint.USEast1);

                    CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
                    {
                        SourceImage = GetImage(@"C:\Temp\TomCruise1.jpg"),
                        TargetImage = GetImage(@"C:\Temp\TomCruise2.jpg")
                    };

                    CompareFacesResponse     response = rekognitionClient.CompareFaces(compareFacesRequest);
                    List <CompareFacesMatch> list     = response.FaceMatches;

                    foreach (string filePath in imagesList)
                    {
                        Image image = GetImage(filePath);
                        if (image == null)
                        {
                            continue;
                        }

                        DetectLabelsRequest detectLabelsRequest = new DetectLabelsRequest()
                        {
                            Image         = image,
                            MinConfidence = MIN_CONFIDENCE,
                        };

                        DetectLabelsResponse detectLabelsResponse = rekognitionClient.DetectLabels(detectLabelsRequest);

                        Console.WriteLine("Image: {0}\n", filePath);
                        foreach (Label label in detectLabelsResponse.Labels)
                        {
                            Console.WriteLine("\t{0} ({1})", label.Name, label.Confidence);
                        }

                        Console.WriteLine();
                    }
                }
                else
                {
                    Console.WriteLine("AWS [default] profile not found");
                }
            }
            catch (AmazonRekognitionException ex)
            {
                Console.WriteLine("AWS Rekognition ERROR: {0}", ex.Message);
            }
            catch (Exception ex)
            {
                Console.WriteLine("ERROR: {0}", ex.Message);
            }

            Console.WriteLine("\nDONE");
            Console.ReadLine();
        }
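If the explicit [default]-profile lookup above is not required, the SDK's default credential resolution chain (which includes the shared credentials file) can be used directly; a one-line sketch:

                    AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(RegionEndpoint.USEast1);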
Example #8
        public Solicitudes GetTestAsync(List <Tab_ConfigSys> Tab_ConfigSys, Solicitudes sol)
        {
            string xClase   = string.Format("{0}|{1}", MethodBase.GetCurrentMethod().Module.Name, MethodBase.GetCurrentMethod().DeclaringType.Name);
            string xProceso = MethodBase.GetCurrentMethod().Name;

            var dto_excepcion = new UTL_TRA_EXCEPCION
            {
                STR_CLASE      = xClase,
                STR_EVENTO     = xProceso,
                STR_PARAMETROS = JsonConvert.SerializeObject(sol),
                STR_APLICATIVO = ConfigurationManager.AppSettings["APLICATIVO"].ToString(),
                STR_SERVIDOR   = System.Net.Dns.GetHostName(),
                FEC_CREACION   = DateTime.Now
            };
            Solicitudes _Solicitudes = new Solicitudes();

            var options = new CredentialProfileOptions
            {
                AccessKey = Tab_ConfigSys[0].llave_Config1,
                SecretKey = Tab_ConfigSys[0].llave_Config2
            };

            try
            {
                var profile = new Amazon.Runtime.CredentialManagement.CredentialProfile("AWSProfileName", options);
                profile.Region = RegionEndpoint.USWest1;
                var netSDKFile = new NetSDKCredentialsFile();
                netSDKFile.RegisterProfile(profile);

                float similarityThreshold = 70F;
                using (AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(Tab_ConfigSys[0].llave_Config1, Tab_ConfigSys[0].llave_Config2))
                {
                    Amazon.Rekognition.Model.Image imageSource = new Amazon.Rekognition.Model.Image();
                    imageSource.Bytes = new MemoryStream(sol.arrImageSelfie);

                    Amazon.Rekognition.Model.Image imageTarget = new Amazon.Rekognition.Model.Image();
                    imageTarget.Bytes = new MemoryStream(sol.arrImageCedulaFrontal);


                    CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
                    {
                        SourceImage         = imageSource,
                        TargetImage         = imageTarget,
                        SimilarityThreshold = similarityThreshold
                    };

                    // Call operation
                    CompareFacesResponse compareFacesResponse = rekognitionClient.CompareFaces(compareFacesRequest);

                    // Display results
                    compareFacesResponse.FaceMatches.ForEach(match =>
                    {
                        ComparedFace face = match.Face;

                        BoundingBox position = face.BoundingBox;

                        _Solicitudes.PorcentMatched = face.Confidence;
                        _Solicitudes.PositionLeft   = position.Left;
                        _Solicitudes.PositionTop    = position.Top;
                    });

                    _Solicitudes.IdTipoIdentificacion = sol.IdTipoIdentificacion;
                    _Solicitudes.Identificacion       = sol.Identificacion;

                    if (_Solicitudes.PorcentMatched == null || _Solicitudes.PorcentMatched == 0)
                    {
                        // Guard against an empty UnmatchedFaces list before indexing it.
                        _Solicitudes.UnMatchedFace = compareFacesResponse.UnmatchedFaces.Count > 0
                            ? compareFacesResponse.UnmatchedFaces[0].Confidence
                            : 0;
                    }
                    else
                    {
                        _Solicitudes.UnMatchedFace = 0;
                    }
                    _Solicitudes.ImageRotationSource = compareFacesResponse.SourceImageOrientationCorrection;
                    _Solicitudes.ImageRotationTarget = compareFacesResponse.TargetImageOrientationCorrection;
                }
                return(_Solicitudes);
            }
            catch (Exception ex)
            {
                dto_excepcion.STR_MENSAJE = ex.Message;
                dto_excepcion.IS_TELEGRAM = true;
                TwoFunTwoMe_DataAccess.Utility.guardaExcepcion(dto_excepcion, ConfigurationManager.ConnectionStrings["TwoFunTwoMeConnection"].ConnectionString);
                _Solicitudes.Mensaje = "ERR_imageTarget";
                throw;
            }
        }
Example #9
        private void ScoreCompare(CompareFacesResponse compareFacesResponse)
        {
            LogInfo(JsonConvert.SerializeObject(compareFacesResponse));

            // LEVEL 3: make a criteria around comparing faces
        }
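The LEVEL 3 comment above invites a concrete criterion. A minimal sketch (an editor's assumption, not the original author's method; the helper name and 90F threshold are illustrative):

        private bool PassesCompare(CompareFacesResponse compareFacesResponse, float minSimilarity = 90F)
        {
            LogInfo(JsonConvert.SerializeObject(compareFacesResponse));
            // Accept when at least one matched face pair meets the similarity bar.
            return compareFacesResponse.FaceMatches.Exists(m => m.Similarity >= minSimilarity);
        }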
Example #10
        public static async Task <string> FunctionHandler(String photo)
        {
            String bucket = "moodanalysis";
            string result = "";

            AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient();

            // Recognizes User's face
            CompareFacesRequest CFR = new CompareFacesRequest()
            {
                //SimilarityThreshold = 50,

                SourceImage = new Image()
                {
                    S3Object = new S3Object()
                    {
                        Name   = "referencePhoto.jpg",
                        Bucket = bucket
                    },
                },

                TargetImage = new Image()
                {
                    S3Object = new S3Object()
                    {
                        Name   = photo,
                        Bucket = bucket
                    },
                },
            };

            CompareFacesResponse compareFacesResponse = await rekognitionClient.CompareFacesAsync(CFR);

            // Accumulates every match's similarity for debugging; not part of the returned result.
            string howManyFaces = "";

            if (compareFacesResponse.FaceMatches.Count == 0)
            {
                return("");
            }

            var         bestMatch       = compareFacesResponse.FaceMatches[0];
            float       bestMatchResult = compareFacesResponse.FaceMatches[0].Similarity;
            BoundingBox bestBoundingBox = compareFacesResponse.FaceMatches[0].Face.BoundingBox;

            foreach (var faceMatch in compareFacesResponse.FaceMatches)
            {
                howManyFaces += faceMatch.Similarity + ",";

                if (bestMatchResult < faceMatch.Similarity)
                {
                    bestMatch       = faceMatch;
                    bestBoundingBox = faceMatch.Face.BoundingBox;
                    bestMatchResult = faceMatch.Similarity;
                }
            }

            // Detects emotions of faces in photo
            DetectFacesRequest detectFacesRequest = new DetectFacesRequest()
            {
                Image = new Image()
                {
                    S3Object = new S3Object()
                    {
                        Name   = photo,
                        Bucket = bucket
                    },
                },

                Attributes = new List <String>()
                {
                    "ALL"
                }
            };

            DetectFacesResponse detectFacesResponse = await rekognitionClient.DetectFacesAsync(detectFacesRequest);

            foreach (FaceDetail face in detectFacesResponse.FaceDetails)
            {
                if (face.BoundingBox.Height == bestBoundingBox.Height &&
                    face.BoundingBox.Left == bestBoundingBox.Left &&
                    face.BoundingBox.Top == bestBoundingBox.Top &&
                    face.BoundingBox.Width == bestBoundingBox.Width)
                {
                    FilterEmotions filter = delegate(FaceDetail faceFilter, ConfidenceFilterDelegate confFilter)
                    {
                        return(faceFilter.Emotions.FindAll(n => confFilter(n)));
                    };

                    var emotQuery = filter(face, IsLowConfidence);

                    // Grab each detected emotion that passes the confidence filter.
                    foreach (Emotion emot in emotQuery)
                    {
                        result += emot.Type + ",";
                    }

                    break;
                }
            }

            // Trim the trailing comma.
            if (result.Length != 0)
            {
                result = result.Substring(0, result.Length - 1);
            }

            return(result);
        }
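Matching the best face by exact float equality on BoundingBox fields works here only because both values come from analyses of the same image; in general it is brittle. A tolerance-based comparison (an editor's sketch; the helper name and epsilon are assumptions):

        private static bool SameBox(BoundingBox a, BoundingBox b, float eps = 1e-4F)
        {
            // Treat two boxes as equal when every edge differs by less than eps.
            return Math.Abs(a.Height - b.Height) < eps &&
                   Math.Abs(a.Left - b.Left) < eps &&
                   Math.Abs(a.Top - b.Top) < eps &&
                   Math.Abs(a.Width - b.Width) < eps;
        }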
Example #11
        private void button1_Click(object sender, EventArgs e)
        {
            float  similarityThreshold = 70F;
            String sourceImage         = "source.jpg";
            String targetImage         = "target.jpg";
            String accessKeyID         = "";
            String secretKey           = "";

            AWSCredentials credentials;

            credentials = new BasicAWSCredentials(accessKeyID.Trim(), secretKey.Trim());

            AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(credentials, Amazon.RegionEndpoint.USEast1);

            Amazon.Rekognition.Model.Image imageSource = new Amazon.Rekognition.Model.Image();
            try
            {
                using (FileStream fs = new FileStream(sourceImage, FileMode.Open, FileAccess.Read))
                {
                    byte[] data = new byte[fs.Length];
                    fs.Read(data, 0, (int)fs.Length);
                    imageSource.Bytes = new MemoryStream(data);
                }
            }
            catch (Exception)
            {
                //Console.WriteLine("Failed to load source image: " + sourceImage);
                listBox1.Items.Add("Failed to load source image: " + sourceImage);
                return;
            }

            Amazon.Rekognition.Model.Image imageTarget = new Amazon.Rekognition.Model.Image();
            try
            {
                using (FileStream fs = new FileStream(targetImage, FileMode.Open, FileAccess.Read))
                {
                    byte[] data = new byte[fs.Length];
                    fs.Read(data, 0, (int)fs.Length);
                    imageTarget.Bytes = new MemoryStream(data);
                }
            }
            catch (Exception)
            {
                //Console.WriteLine("Failed to load target image: " + targetImage);
                listBox1.Items.Add("Failed to load target image: " + targetImage);
                return;
            }

            CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
            {
                SourceImage         = imageSource,
                TargetImage         = imageTarget,
                SimilarityThreshold = similarityThreshold
            };

            // Call operation
            CompareFacesResponse compareFacesResponse = rekognitionClient.CompareFaces(compareFacesRequest);


            // Display results
            foreach (CompareFacesMatch match in compareFacesResponse.FaceMatches)
            {
                ComparedFace face     = match.Face;
                BoundingBox  position = face.BoundingBox;
                //Console.WriteLine("Face at " + position.Left
                //      + " " + position.Top
                //      + " matches with " + match.Similarity
                //      + "% confidence.");
                listBox1.Items.Add("Face at " + position.Left
                                   + " " + position.Top
                                   + " matches with " + match.Similarity
                                   + "% confidence.");
            }

            //Console.WriteLine("There was " + compareFacesResponse.UnmatchedFaces.Count + " face(s) that did not match");
            listBox1.Items.Add("There was " + compareFacesResponse.UnmatchedFaces.Count + " face(s) that did not match");
        }
Example #12
        // Face detection method
        private async Task FacialRecognitionScan(ApplicationUser user, UsersInGymDetail currentFacilityDetail)
        {
            // Initialize the similarity threshold for accepting a face match, plus the source
            // and target images; the S3 bucket images are chosen from the currently logged-in user.
            float  similarityThreshold = 70F;
            string photo       = $"{user.FirstName}_{user.Id}.jpg";
            String targetImage = $"{user.FirstName}_{user.Id}_Target.jpg";

            try
            {
                // create image objects
                Image imageSource = new Image()
                {
                    S3Object = new S3Object()
                    {
                        Name   = photo,
                        Bucket = bucket
                    },
                };
                Image imageTarget = new Image()
                {
                    S3Object = new S3Object()
                    {
                        Name   = targetImage,
                        Bucket = bucket
                    },
                };
                // create a compare face request object
                CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
                {
                    SourceImage         = imageSource,
                    TargetImage         = imageTarget,
                    SimilarityThreshold = similarityThreshold
                };

                // detect face features of img scanned
                CompareFacesResponse compareFacesResponse = await AmazonRekognition.CompareFacesAsync(compareFacesRequest);

                // Display results
                foreach (CompareFacesMatch match in compareFacesResponse.FaceMatches)
                {
                    // if the similarity for this match is over 90 then grant access
                    if (match.Similarity > 90)
                    {
                        // if there is a match set scan success
                        user.IsCameraScanSuccessful = true;
                    }
                    else
                    {
                        ViewBag.MatchResult = "Facial Match Failed!";
                    }
                }
            }
            catch (Exception e)
            {
                _logger.LogInformation(e.Message);
            }

            // now add get facial details to display in the view.
            DetectFacesRequest detectFacesRequest = new DetectFacesRequest()
            {
                Image = new Image()
                {
                    S3Object = new S3Object()
                    {
                        Name   = targetImage,
                        Bucket = bucket
                    },
                },
                // "DEFAULT": BoundingBox, Confidence, Landmarks, Pose, and Quality.
                Attributes = new List <String>()
                {
                    "ALL"
                }
            };

            try
            {
                DetectFacesResponse detectFacesResponse = await AmazonRekognition.DetectFacesAsync(detectFacesRequest);

                bool hasAll = detectFacesRequest.Attributes.Contains("ALL");
                foreach (FaceDetail face in detectFacesResponse.FaceDetails)
                {
                    // if the face found has all attributes within a Detect Face object then save these values to the database.
                    if (hasAll)
                    {
                        currentFacilityDetail.IsSmiling    = face.Smile.Value;
                        currentFacilityDetail.Gender       = face.Gender.Value.ToString();
                        currentFacilityDetail.AgeRangeLow  = face.AgeRange.Low;
                        currentFacilityDetail.AgeRangeHigh = face.AgeRange.High;
                    }
                }
            }
            catch (Exception e)
            {
                _logger.LogInformation(e.Message);
            }
        }
Example #13
    public static void Example()
    {
        float  similarityThreshold = 70F;
        String sourceImage         = "source.jpg";
        String targetImage         = "target.jpg";

        AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient();

        Amazon.Rekognition.Model.Image imageSource = new Amazon.Rekognition.Model.Image();
        try
        {
            using (FileStream fs = new FileStream(sourceImage, FileMode.Open, FileAccess.Read))
            {
                byte[] data = new byte[fs.Length];
                fs.Read(data, 0, (int)fs.Length);
                imageSource.Bytes = new MemoryStream(data);
            }
        }
        catch (Exception)
        {
            Console.WriteLine("Failed to load source image: " + sourceImage);
            return;
        }

        Amazon.Rekognition.Model.Image imageTarget = new Amazon.Rekognition.Model.Image();
        try
        {
            using (FileStream fs = new FileStream(targetImage, FileMode.Open, FileAccess.Read))
            {
                byte[] data = new byte[fs.Length];
                fs.Read(data, 0, (int)fs.Length);
                imageTarget.Bytes = new MemoryStream(data);
            }
        }
        catch (Exception)
        {
            Console.WriteLine("Failed to load target image: " + targetImage);
            return;
        }

        CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
        {
            SourceImage         = imageSource,
            TargetImage         = imageTarget,
            SimilarityThreshold = similarityThreshold
        };

        // Call operation
        CompareFacesResponse compareFacesResponse = rekognitionClient.CompareFaces(compareFacesRequest);

        // Display results
        foreach (CompareFacesMatch match in compareFacesResponse.FaceMatches)
        {
            ComparedFace face     = match.Face;
            BoundingBox  position = face.BoundingBox;
            Console.WriteLine("Face at " + position.Left
                              + " " + position.Top
                              + " matches with " + face.Confidence
                              + "% confidence.");
        }

        Console.WriteLine("There was " + compareFacesResponse.UnmatchedFaces.Count + " face(s) that did not match");
        Console.WriteLine("Source image rotation: " + compareFacesResponse.SourceImageOrientationCorrection);
        Console.WriteLine("Target image rotation: " + compareFacesResponse.TargetImageOrientationCorrection);
    }