Code example #1
        public void RekognitionCompareFaces()
        {
            #region to-compare-two-images-1482181985581

            var client   = new AmazonRekognitionClient();
            var response = client.CompareFaces(new CompareFacesRequest
            {
                SimilarityThreshold = 90,
                SourceImage         = new Image {
                    S3Object = new S3Object {
                        Bucket = "mybucket",
                        Name   = "mysourceimage"
                    }
                },
                TargetImage = new Image {
                    S3Object = new S3Object {
                        Bucket = "mybucket",
                        Name   = "mytargetimage"
                    }
                }
            });

            List <CompareFacesMatch> faceMatches     = response.FaceMatches;
            ComparedSourceImageFace  sourceImageFace = response.SourceImageFace;

            #endregion
        }
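The snippet stores the matches in faceMatches but never inspects them. A minimal follow-up sketch, reusing the variables above (the console output is only an illustration, not part of the original example), could report each match's similarity and bounding box:

            // Illustrative follow-up: report each match returned by CompareFaces.
            foreach (CompareFacesMatch match in faceMatches)
            {
                BoundingBox box = match.Face.BoundingBox;
                Console.WriteLine($"Match with {match.Similarity}% similarity at ({box.Left}, {box.Top})");
            }
            Console.WriteLine($"Source face detected with {sourceImageFace.Confidence}% confidence.");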
Code example #2
        private void btn_find_faces_Click(object sender, EventArgs e)
        {
            var source = ToBytesStream($"{sourceLocation}");
            var target = ToBytesStream($"{targetLocation}");

            var client  = new AmazonRekognitionClient();
            var request = new CompareFacesRequest {
                SourceImage = source, TargetImage = target, SimilarityThreshold = 90
            };

            var response = client.CompareFaces(request);

            txt_result.Text = $"Found {response.FaceMatches.Count} matched faces with threshold 90%";
        }
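Example #2 depends on a ToBytesStream helper that is not shown. A minimal sketch of what such a helper might look like, assuming it takes a file path and wraps the file's bytes in an Amazon.Rekognition.Model.Image (the name and signature are inferred from the call sites above):

        private Amazon.Rekognition.Model.Image ToBytesStream(string path)
        {
            // Read the image file into memory; the SDK consumes the stream when the
            // CompareFaces request is sent, so the stream is left open here.
            byte[] data = File.ReadAllBytes(path);
            return new Amazon.Rekognition.Model.Image { Bytes = new MemoryStream(data) };
        }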
Code example #3
        private string CompareFaces(string strPersonName, MemoryStream msCapture, MemoryStream msFacePic)
        {
            AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient("", "", Amazon.RegionEndpoint.USEast1);


            CompareFacesRequest req = new CompareFacesRequest();

            Amazon.Rekognition.Model.Image src = new Amazon.Rekognition.Model.Image();
            src.Bytes       = msCapture;
            req.SourceImage = src;


            Amazon.Rekognition.Model.Image trg = new Amazon.Rekognition.Model.Image();
            trg.Bytes = msFacePic;

            req.TargetImage = trg;
            try
            {
                CompareFacesResponse     compareFacesResult = rekognitionClient.CompareFaces(req);
                List <CompareFacesMatch> faceDetails        = compareFacesResult.FaceMatches;


                ComparedFace face = null;
                foreach (CompareFacesMatch match in faceDetails)
                {
                    face = match.Face;
                    BoundingBox position = face.BoundingBox;
                    // Note: face.Confidence is the detection confidence for this face;
                    // the actual similarity score for the comparison is match.Similarity.
                    System.Diagnostics.Debug.Write("Face at " + position.Left
                                                   + " " + position.Top
                                                   + " matches with " + face.Confidence
                                                   + "% confidence.");
                    if (face.Confidence > 75)
                    {
                        return(strPersonName);
                    }
                }
            }
            catch (Exception)
            {
                return("Fail");
            }


            return("Unknown");
        }
Code example #4
        public CompareFacesResponse CompareFaces(byte[] source, byte[] target)
        {
            AmazonRekognitionClient rekoClient = new AmazonRekognitionClient(_credentials, Amazon.RegionEndpoint.USWest2);

            CompareFacesRequest cfr = new CompareFacesRequest();

            Amazon.Rekognition.Model.Image sourceImage = new Amazon.Rekognition.Model.Image();
            Amazon.Rekognition.Model.Image targetImage = new Amazon.Rekognition.Model.Image();

            var sourceStream = new MemoryStream(source);
            var targetStream = new MemoryStream(target);

            sourceImage.Bytes = sourceStream;
            targetImage.Bytes = targetStream;

            cfr.SourceImage = sourceImage;
            cfr.TargetImage = targetImage;

            return(rekoClient.CompareFaces(cfr));
        }
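A possible caller for this wrapper, assuming the images come from local files (the paths and the use of match.Similarity are illustrative, not part of the original example):

            // Illustrative usage of the CompareFaces(byte[], byte[]) wrapper above.
            byte[] source = File.ReadAllBytes(@"C:\Temp\source.jpg");
            byte[] target = File.ReadAllBytes(@"C:\Temp\target.jpg");

            CompareFacesResponse result = CompareFaces(source, target);
            foreach (CompareFacesMatch match in result.FaceMatches)
            {
                Console.WriteLine($"Matched face with {match.Similarity}% similarity.");
            }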
Code example #5
        static void Main(string[] args)
        {
            const float MIN_CONFIDENCE = 90F;

            try
            {
                string[] imagesList = GetListOfImages();
                if (imagesList == null || imagesList.Length == 0)
                {
                    Console.WriteLine("No images found in the Images folder");
                    return;
                }

                // Constructs a SharedCredentialsFile object from the default credentials file.
                SharedCredentialsFile sharedCredentialsFile = new SharedCredentialsFile();

                // Get the [default] profile from the credentials file.
                CredentialProfile defaultProfile = GetDefaultProfile(sharedCredentialsFile);

                if (defaultProfile != null)
                {
                    // Get the credentials (access key, secret access key, etc.)
                    AWSCredentials credentials = AWSCredentialsFactory.GetAWSCredentials(defaultProfile, sharedCredentialsFile);

                    AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(credentials, RegionEndpoint.USEast1);

                    CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
                    {
                        SourceImage = GetImage(@"C:\Temp\TomCruise1.jpg"),
                        TargetImage = GetImage(@"C:\Temp\TomCruise2.jpg")
                    };

                    CompareFacesResponse     response = rekognitionClient.CompareFaces(compareFacesRequest);
                    List <CompareFacesMatch> list     = response.FaceMatches;

                    foreach (string filePath in imagesList)
                    {
                        Image image = GetImage(filePath);
                        if (image == null)
                        {
                            continue;
                        }

                        DetectLabelsRequest detectLabelsRequest = new DetectLabelsRequest()
                        {
                            Image         = image,
                            MinConfidence = MIN_CONFIDENCE,
                        };

                        DetectLabelsResponse detectLabelsResponse = rekognitionClient.DetectLabels(detectLabelsRequest);

                        Console.WriteLine("Image: {0}\n", filePath);
                        foreach (Label label in detectLabelsResponse.Labels)
                        {
                            Console.WriteLine("\t{0} ({1})", label.Name, label.Confidence);
                        }

                        Console.WriteLine();
                    }
                }
                else
                {
                    Console.WriteLine("AWS [default] profile not found");
                }
            }
            catch (AmazonRekognitionException ex)
            {
                Console.WriteLine("AWS Rekognition ERROR: {0}", ex.Message);
            }
            catch (Exception ex)
            {
                Console.WriteLine("ERROR: {0}", ex.Message);
            }

            Console.WriteLine("\nDONE");
            Console.ReadLine();
        }
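Example #5 relies on GetImage, GetListOfImages, and GetDefaultProfile helpers that are not included. A minimal sketch of GetImage, assuming it loads a local file into an Amazon.Rekognition.Model.Image and returns null when the file cannot be read (behavior inferred from the null check in the loop above):

        private static Image GetImage(string filePath)
        {
            try
            {
                byte[] data = File.ReadAllBytes(filePath);
                return new Image { Bytes = new MemoryStream(data) };
            }
            catch (IOException)
            {
                // The caller skips images that fail to load.
                return null;
            }
        }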
Code example #6
File: AWSAccess.cs  Project: Alfredo1292/MVC2fun2me
        public Solicitudes GetTestAsync(List <Tab_ConfigSys> Tab_ConfigSys, Solicitudes sol)
        {
            string xClase   = string.Format("{0}|{1}", MethodBase.GetCurrentMethod().Module.Name, MethodBase.GetCurrentMethod().DeclaringType.Name);
            string xProceso = MethodBase.GetCurrentMethod().Name;

            var dto_excepcion = new UTL_TRA_EXCEPCION
            {
                STR_CLASE      = xClase,
                STR_EVENTO     = xProceso,
                STR_PARAMETROS = JsonConvert.SerializeObject(sol),
                STR_APLICATIVO = ConfigurationManager.AppSettings["APLICATIVO"].ToString(),
                STR_SERVIDOR   = System.Net.Dns.GetHostName(),
                FEC_CREACION   = DateTime.Now
            };
            Solicitudes _Solicitudes = new Solicitudes();

            var options = new CredentialProfileOptions
            {
                AccessKey = Tab_ConfigSys[0].llave_Config1,
                SecretKey = Tab_ConfigSys[0].llave_Config2
            };

            try
            {
                var profile = new Amazon.Runtime.CredentialManagement.CredentialProfile("AWSProfileName", options);
                profile.Region = RegionEndpoint.USWest1;
                var netSDKFile = new NetSDKCredentialsFile();
                netSDKFile.RegisterProfile(profile);

                float similarityThreshold = 70F;
                //String sourceImage = sol.arrImageSelfie;
                //String targetImage = sol.UrlFotoCedula;

                //using (AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(Tab_ConfigSys[0].llave_Config1, Tab_ConfigSys[0].llave_Config2, RegionEndpoint.USWest1))
                using (AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(Tab_ConfigSys[0].llave_Config1, Tab_ConfigSys[0].llave_Config2))
                {
                    Amazon.Rekognition.Model.Image imageSource = new Amazon.Rekognition.Model.Image();

                    //using (FileStream fs = new FileStream(new MemoryStream(bytes), FileMode.Open, FileAccess.Read))
                    //{
                    // byte[] data = new byte[fs.Length];
                    //  fs.Read(data, 0, (int)fs.Length);
                    imageSource.Bytes = new MemoryStream(sol.arrImageSelfie);
                    // }


                    Amazon.Rekognition.Model.Image imageTarget = new Amazon.Rekognition.Model.Image();

                    // using (FileStream fs = new FileStream(targetImage, FileMode.Open, FileAccess.Read))
                    //{
                    //  byte[] data = new byte[fs.Length];
                    //  data = new byte[fs.Length];
                    //  fs.Read(data, 0, (int)fs.Length);
                    imageTarget.Bytes = new MemoryStream(sol.arrImageCedulaFrontal);
                    // }


                    CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
                    {
                        SourceImage         = imageSource,
                        TargetImage         = imageTarget,
                        SimilarityThreshold = similarityThreshold
                    };

                    // Call operation
                    CompareFacesResponse compareFacesResponse = rekognitionClient.CompareFaces(compareFacesRequest);

                    // Display results
                    //foreach (CompareFacesMatch match in compareFacesResponse.FaceMatches)
                    compareFacesResponse.FaceMatches.ForEach(match =>
                    {
                        ComparedFace face = match.Face;

                        BoundingBox position = face.BoundingBox;

                        _Solicitudes.PorcentMatched = face.Confidence;
                        _Solicitudes.PositionLeft   = position.Left;
                        _Solicitudes.PositionTop    = position.Top;
                    });

                    _Solicitudes.IdTipoIdentificacion = sol.IdTipoIdentificacion;
                    _Solicitudes.Identificacion       = sol.Identificacion;

                    if (_Solicitudes.PorcentMatched == 0 || _Solicitudes.PorcentMatched == null)
                    {
                        // No face matched; report the confidence of the first unmatched face, if any.
                        _Solicitudes.UnMatchedFace = compareFacesResponse.UnmatchedFaces.Count > 0
                            ? compareFacesResponse.UnmatchedFaces[0].Confidence
                            : 0;
                    }
                    else
                    {
                        _Solicitudes.UnMatchedFace = 0;
                    }
                    _Solicitudes.ImageRotationSource = compareFacesResponse.SourceImageOrientationCorrection;
                    _Solicitudes.ImageRotationTarget = compareFacesResponse.TargetImageOrientationCorrection;
                }
                return(_Solicitudes);
            }
            catch (Exception ex)
            {
                dto_excepcion.STR_MENSAJE = ex.Message;
                dto_excepcion.IS_TELEGRAM = true;
                TwoFunTwoMe_DataAccess.Utility.guardaExcepcion(dto_excepcion, ConfigurationManager.ConnectionStrings["TwoFunTwoMeConnection"].ConnectionString);
                _Solicitudes.Mensaje = "ERR_imageTarget";
                throw;
            }
        }
Code example #7
        private void button1_Click(object sender, EventArgs e)
        {
            float  similarityThreshold = 70F;
            String sourceImage         = "source.jpg";
            String targetImage         = "target.jpg";
            String accessKeyID         = "";
            String secretKey           = "";

            AWSCredentials credentials;

            credentials = new BasicAWSCredentials(accessKeyID.Trim(), secretKey.Trim());

            AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(credentials, Amazon.RegionEndpoint.USEast1);

            Amazon.Rekognition.Model.Image imageSource = new Amazon.Rekognition.Model.Image();
            try
            {
                using (FileStream fs = new FileStream(sourceImage, FileMode.Open, FileAccess.Read))
                {
                    byte[] data = new byte[fs.Length];
                    fs.Read(data, 0, (int)fs.Length);
                    imageSource.Bytes = new MemoryStream(data);
                }
            }
            catch (Exception)
            {
                //Console.WriteLine("Failed to load source image: " + sourceImage);
                listBox1.Items.Add("Failed to load source image: " + sourceImage);
                return;
            }

            Amazon.Rekognition.Model.Image imageTarget = new Amazon.Rekognition.Model.Image();
            try
            {
                using (FileStream fs = new FileStream(targetImage, FileMode.Open, FileAccess.Read))
                {
                    byte[] data = new byte[fs.Length];
                    fs.Read(data, 0, (int)fs.Length);
                    imageTarget.Bytes = new MemoryStream(data);
                }
            }
            catch (Exception)
            {
                //Console.WriteLine("Failed to load target image: " + targetImage);
                listBox1.Items.Add("Failed to load target image: " + targetImage);
                return;
            }

            CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
            {
                SourceImage         = imageSource,
                TargetImage         = imageTarget,
                SimilarityThreshold = similarityThreshold
            };

            // Call operation
            CompareFacesResponse compareFacesResponse = rekognitionClient.CompareFaces(compareFacesRequest);


            // Display results
            foreach (CompareFacesMatch match in compareFacesResponse.FaceMatches)
            {
                ComparedFace face     = match.Face;
                BoundingBox  position = face.BoundingBox;
                //Console.WriteLine("Face at " + position.Left
                //      + " " + position.Top
                //      + " matches with " + match.Similarity
                //      + "% confidence.");
                listBox1.Items.Add("Face at " + position.Left
                                   + " " + position.Top
                                   + " matches with " + match.Similarity
                                   + "% confidence.");
            }

            //Console.WriteLine("There was " + compareFacesResponse.UnmatchedFaces.Count + " face(s) that did not match");
            listBox1.Items.Add("There was " + compareFacesResponse.UnmatchedFaces.Count + " face(s) that did not match");
        }
Code example #8
        protected void btnUpload_Click(object sender, EventArgs e)
        {
            /*
             *  1. Upload the image to the S3 bucket.
             *  2. Detect faces while uploading the image to the bucket.
             *  3. Compare the face against the face collection.
             *  4. Get the list of images from the bucket.
             *  5. Store the uploaded image in the face collection.
             */
            try
            {
                image.Visible   = false;
                lblMessage.Text = "";
                string imagefile = string.Empty;
                if (fuImage.HasFile)
                {
                    string imagename = System.IO.Path.GetFullPath(fuImage.PostedFile.FileName);
                    //Label1.Text = imagename;
                    string ext = System.IO.Path.GetExtension(fuImage.FileName);
                    // Label2.Text = ext;
                    // imagefile = Server.MapPath("~/Images/" + imagename);
                    imagefile = imagename;
                    if (ext == ".jpg" || ext == ".png")
                    {
                        //fuImage.SaveAs(imagefile);

                        Stream             st         = fuImage.PostedFile.InputStream;
                        IAmazonRekognition rekoClient = new AmazonRekognitionClient(Amazon.RegionEndpoint.USEast1);
                        string             name       = Path.GetFileName(fuImage.PostedFile.FileName);
                        name = @"C:\Blazar\CompareImage\" + fuImage.FileName;
                        string myBucketName    = "blazarstorage"; //your s3 bucket name goes here
                        string s3DirectoryName = "";
                        string s3FileName      = @name;
                        string fileName        = fuImage.FileName;

                        /*
                         * StoreFaceInCollection(@name, rekoClient, myBucketName);
                         *
                         * ImageRecognition imageRecog = new ImageRecognition();
                         * IdentifyFaces(name, fuImage.PostedFile.FileName.ToString(), myBucketName);
                         * */
                        //STEP----1
                        //validate the image is a face or not--step 1
                        //bool isFaceImage = DetectFaces(fileName);
                        //bool isFaceImage = DetectFaces(name);
                        bool isFaceImage = true;
                        if (isFaceImage == true)
                        {
                            /*
                             * we can compare image if those are inside buckets.
                             * For comparing images , we have to follow below steps
                             *   store it in bucket
                             *   add to it face collection
                             *   compare images
                             *
                             */


                            //
                            bool isUpload;
                            //upload image into bucket
                            isUpload = sendMyFileToS3(st, myBucketName, s3DirectoryName, s3FileName, fileName);
                            if (isUpload == true)
                            {
                                //store image in a face collection
                                StoreFaceInCollection(@name, rekoClient, myBucketName);

                                //  AmazonUploader myUploader = new AmazonUploader();
                                //  bool b = myUploader.IsFilExist("<access-key-id>", "<secret-access-key>");
                                //validate the existance of the image in the bucket
                                //for that we have to get all the face from face collection
                                try
                                {
                                    AmazonS3Client       s3Client            = new AmazonS3Client(Amazon.RegionEndpoint.USEast1);
                                    ListObjectsV2Request reqGetObjFromBucket = new ListObjectsV2Request
                                    {
                                        BucketName = myBucketName,
                                        MaxKeys    = 1000
                                    };
                                    ListObjectsV2Response resGetObjFromBucket;
                                    do
                                    {
                                        resGetObjFromBucket = s3Client.ListObjectsV2(reqGetObjFromBucket);
                                        foreach (Amazon.S3.Model.S3Object entry in resGetObjFromBucket.S3Objects)
                                        {
                                            //if (DetectFaces(entry.Key))//validat the image content
                                            //{
                                            //  if (s3FileName != entry.Key)//input image should not compare
                                            if (fileName != entry.Key)
                                            {
                                                var response = rekoClient.CompareFaces(new CompareFacesRequest
                                                {
                                                    SimilarityThreshold = 90,
                                                    SourceImage         = new Amazon.Rekognition.Model.Image
                                                    {
                                                        S3Object = new Amazon.Rekognition.Model.S3Object
                                                        {
                                                            Bucket = myBucketName,
                                                            // Name=""
                                                            // Name = s3FileName
                                                            Name = fileName
                                                        }
                                                    },
                                                    TargetImage = new Amazon.Rekognition.Model.Image
                                                    {
                                                        S3Object = new Amazon.Rekognition.Model.S3Object
                                                        {
                                                            Bucket = myBucketName,
                                                            // Name=""
                                                            Name = entry.Key
                                                        }
                                                    }
                                                });
                                                if (response.FaceMatches.Count > 0)
                                                {
                                                    image.Visible        = true;
                                                    dupImage.Src         = name;
                                                    existingImage.Src    = "https://s3.amazonaws.com/blazarstorage/" + entry.Key;
                                                    lblMessage.Text      = "You are trying to upload the image " + s3FileName + " which is matching with " + entry.Key;
                                                    lblMessage.ForeColor = System.Drawing.Color.Green;
                                                    IAmazonS3 s3 = new AmazonS3Client(Amazon.RegionEndpoint.USEast1);
                                                    s3.DeleteObject(myBucketName, fileName);
                                                    return;
                                                }
                                            }
                                            // }
                                        }
                                    } while (resGetObjFromBucket.IsTruncated == true);
                                }
                                catch (Exception ex)
                                {
                                    IAmazonS3 s3 = new AmazonS3Client(Amazon.RegionEndpoint.USEast1);
                                    s3.DeleteObject(myBucketName, fileName);
                                    lblMessage.Text      = ex.Message.ToString();
                                    lblMessage.ForeColor = System.Drawing.Color.Red;
                                }
                                // List< Face > lstOfFaces= GetListOfFaceInFaceCollection();
                                //if (lstOfFaces.Count >= 1)
                                //{
                                //   for(int i=0;i<lstOfFaces.Count;i++)
                                //   {
                                //        GetObjectRequest objReq = new GetObjectRequest();
                                //        GetObjectResponse res = new GetObjectResponse();


                                //        if (response.FaceMatches.Count > 0)
                                //        {
                                //            IAmazonS3 s3 = new AmazonS3Client();
                                //            s3.DeleteObject(myBucketName, s3FileName);

                                //        }
                                //      }



                                //}
                            }
                            else
                            {
                                lblMessage.Text = "Please upload again!";
                            }
                            //else
                            //{
                            //    //upload image into bucket--step 3
                            //    isUpload = sendMyFileToS3(st, myBucketName, s3DirectoryName, s3FileName);
                            //    if (isUpload == true)
                            //    {
                            //        //store image in a face collection
                            //        StoreFaceInCollection(@name, rekoClient, myBucketName);
                            //        lblMessage.Text = "successfully uploaded";
                            //        Response.Write("successfully uploaded");

                            //    }
                            //    else
                            //        lblMessage.Text = "error";
                            //}
                        }
                        else
                        {
                            lblMessage.Text = "Please upload a valid image!!!";
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                lblMessage.Text = ex.Message.ToString();
            }
        }
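Example #8 leaves its DetectFaces validation step commented out. A minimal sketch of such a helper, assuming it checks a local image file for at least one face with the Rekognition DetectFaces operation (the signature is inferred from the commented-out call sites):

        private bool DetectFaces(string filePath)
        {
            IAmazonRekognition rekoClient = new AmazonRekognitionClient(Amazon.RegionEndpoint.USEast1);
            var request = new DetectFacesRequest
            {
                Image = new Amazon.Rekognition.Model.Image
                {
                    Bytes = new MemoryStream(File.ReadAllBytes(filePath))
                }
            };

            DetectFacesResponse response = rekoClient.DetectFaces(request);

            // Treat the image as valid only if Rekognition found at least one face.
            return response.FaceDetails.Count > 0;
        }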
Code example #9
    public static void Example()
    {
        float  similarityThreshold = 70F;
        String sourceImage         = "source.jpg";
        String targetImage         = "target.jpg";

        AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient();

        Amazon.Rekognition.Model.Image imageSource = new Amazon.Rekognition.Model.Image();
        try
        {
            using (FileStream fs = new FileStream(sourceImage, FileMode.Open, FileAccess.Read))
            {
                byte[] data = new byte[fs.Length];
                fs.Read(data, 0, (int)fs.Length);
                imageSource.Bytes = new MemoryStream(data);
            }
        }
        catch (Exception)
        {
            Console.WriteLine("Failed to load source image: " + sourceImage);
            return;
        }

        Amazon.Rekognition.Model.Image imageTarget = new Amazon.Rekognition.Model.Image();
        try
        {
            using (FileStream fs = new FileStream(targetImage, FileMode.Open, FileAccess.Read))
            {
                byte[] data = new byte[fs.Length];
                fs.Read(data, 0, (int)fs.Length);
                imageTarget.Bytes = new MemoryStream(data);
            }
        }
        catch (Exception)
        {
            Console.WriteLine("Failed to load target image: " + targetImage);
            return;
        }

        CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
        {
            SourceImage         = imageSource,
            TargetImage         = imageTarget,
            SimilarityThreshold = similarityThreshold
        };

        // Call operation
        CompareFacesResponse compareFacesResponse = rekognitionClient.CompareFaces(compareFacesRequest);

        // Display results
        foreach (CompareFacesMatch match in compareFacesResponse.FaceMatches)
        {
            ComparedFace face     = match.Face;
            BoundingBox  position = face.BoundingBox;
            Console.WriteLine("Face at " + position.Left
                              + " " + position.Top
                              + " matches with " + match.Similarity
                              + "% similarity.");
        }

        Console.WriteLine("There was " + compareFacesResponse.UnmatchedFaces.Count + " face(s) that did not match");
        Console.WriteLine("Source image rotation: " + compareFacesResponse.SourceImageOrientationCorrection);
        Console.WriteLine("Target image rotation: " + compareFacesResponse.TargetImageOrientationCorrection);
    }