/// <summary>
/// Compares the input image against the faces indexed in the "dyFaceCollection"
/// Rekognition collection and reports whether a sufficiently similar face exists.
/// Lambda function name: "dyFaceRekognition".
/// </summary>
/// <param name="input">S3 object key of the image inside the "dyface-test-bucket" bucket.</param>
/// <param name="context">Lambda execution context (used for logging).</param>
/// <returns>True when a face match with similarity >= 95 is found; otherwise false.</returns>
public async Task<bool> RekognizeFace(string input, ILambdaContext context)
{
    AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient();

    Image image = new Image()
    {
        S3Object = new S3Object()
        {
            // NOTE(review): hard-coded bucket name — consider an environment variable.
            Bucket = "dyface-test-bucket",
            Name = input
        }
    };

    SearchFacesByImageRequest searchFaceRequest = new SearchFacesByImageRequest()
    {
        CollectionId = "dyFaceCollection",
        Image = image,
        FaceMatchThreshold = 70F,
        MaxFaces = 1
    };

    try
    {
        SearchFacesByImageResponse searchFaceResponse =
            await rekognitionClient.SearchFacesByImageAsync(searchFaceRequest);

        foreach (FaceMatch face in searchFaceResponse.FaceMatches)
        {
            if (face.Similarity >= 95)
            {
                return true;
            }
        }
    }
    catch (InvalidParameterException)
    {
        // Rekognition throws InvalidParameterException when no face can be
        // detected in the image — treat as "no match" rather than failing.
        context?.Logger?.LogLine($"No detectable face in object '{input}'.");
    }
    catch (InvalidS3ObjectException ex)
    {
        // The referenced S3 object does not exist or cannot be read
        // (addresses the original "missing error handling" TODO).
        context?.Logger?.LogLine($"S3 object '{input}' could not be read: {ex.Message}");
    }

    return false;
}
/// <summary>
/// Looks up an S3 object by ETag in the "recongimages" bucket and searches a
/// Rekognition collection for faces matching that image.
/// NOTE(review): this method appears broken/incomplete — see inline comments.
/// </summary>
private SearchFacesByImageResponse SearchPhotosByImage()
{
    //var s3ObjectResponse = S3Client.GetObjectAsync("recongimages", _eTag);
    var getObjectRequest = new GetObjectRequest();
    getObjectRequest.EtagToMatch = _eTag;
    getObjectRequest.BucketName = "recongimages";
    // NOTE(review): GetObjectRequest.Key is never set — S3 GetObject requires an
    // object key, so this call will fail; confirm how the key should be derived.
    var s3ObjectResponse = S3Client.GetObjectAsync(getObjectRequest);
    //var objectName = s3ObjectResponse.Result.Key;
    // SECURITY(review): hard-coded IAM access key/secret in source — revoke these
    // credentials and use the default credential chain (profile/role) instead.
    var rekognitionClient = new Amazon.Rekognition.AmazonRekognitionClient("AKIAJASZFMUMX6B4JUIQ", "4YDS6cYGyTkES76EwwbLU/0KL1O7lO8YQGpsi2zV", Amazon.RegionEndpoint.APSoutheast2);
    var requestSearch = new SearchFacesByImageRequest();
    requestSearch.MaxFaces = 1;
    //requestSearch.CollectionId
    // NOTE(review): CollectionId is required by SearchFacesByImage but is never
    // assigned (the line above is commented out) — the request will be rejected.
    var s3Object = new Amazon.Rekognition.Model.S3Object();
    // NOTE(review): blocks on .Result; also S3Object.Bucket is never set here.
    s3Object.Name = s3ObjectResponse.Result.Key;
    var image = new Image();
    image.S3Object = s3Object;
    requestSearch.Image = image;
    //requestSearch.FaceMatchThreshold
    var response = rekognitionClient.SearchFacesByImageAsync(requestSearch);
    // NOTE(review): blocking on .Result — deadlock risk on a synchronization
    // context and wraps failures in AggregateException.
    return(response.Result);
}
/// <summary>
/// Synchronously executes the SearchFacesByImage operation: marshals the request,
/// invokes the service call, and unmarshals the response.
/// </summary>
/// <param name="request">Parameters for the SearchFacesByImage operation.</param>
/// <returns>The unmarshalled service response.</returns>
internal SearchFacesByImageResponse SearchFacesByImage(SearchFacesByImageRequest request)
{
    return Invoke<SearchFacesByImageRequest, SearchFacesByImageResponse>(
        request,
        new SearchFacesByImageRequestMarshaller(),
        SearchFacesByImageResponseUnmarshaller.Instance);
}
/// <summary>
/// Searches the given Rekognition collection for faces matching the largest face
/// in an S3-hosted photo and returns the confidence of the first match.
/// </summary>
/// <param name="collectionId">Rekognition collection to search.</param>
/// <param name="bucket">S3 bucket containing the photo.</param>
/// <param name="photo">S3 object key of the photo.</param>
/// <returns>Confidence of the first matched face as a string, or "" when no match.</returns>
public static string RecogniseFaceMatchFromS3(string collectionId, string bucket, string photo)
{
    AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(RegionEndpoint.APSouth1);

    // Get an image object from S3 bucket.
    Image image = new Image()
    {
        S3Object = new S3Object()
        {
            Bucket = bucket,
            Name = photo
        }
    };

    SearchFacesByImageRequest searchFacesByImageRequest = new SearchFacesByImageRequest()
    {
        CollectionId = collectionId,
        Image = image,
        FaceMatchThreshold = 70F,
        MaxFaces = 2
    };

    // Block on the async call once via GetAwaiter().GetResult() so a service
    // failure surfaces as the original exception rather than AggregateException
    // (the original accessed .Result inside the loop).
    SearchFacesByImageResponse searchFacesByImageResponse =
        rekognitionClient.SearchFacesByImageAsync(searchFacesByImageRequest).GetAwaiter().GetResult();

    Console.WriteLine("Faces matching largest face in image from " + photo);
    foreach (FaceMatch face in searchFacesByImageResponse.FaceMatches)
    {
        Console.WriteLine("FaceId: " + face.Face.FaceId + ", Similarity: " + face.Similarity);
        // Only the first match is reported, mirroring the original early return.
        return face.Face.Confidence.ToString();
    }
    return "";
}
/// <summary>
/// Sample: searches the "MyCollection" collection for faces matching the largest
/// face in an S3-hosted image and prints each match with its similarity score.
/// </summary>
public static void Example()
{
    var targetCollection = "MyCollection";
    var sourceBucket = "bucket";
    var sourceKey = "input.jpg";

    var client = new AmazonRekognitionClient();

    // Build the request around the photo stored in S3 (no inline image bytes).
    var request = new SearchFacesByImageRequest()
    {
        CollectionId = targetCollection,
        Image = new Image()
        {
            S3Object = new S3Object()
            {
                Bucket = sourceBucket,
                Name = sourceKey
            }
        },
        FaceMatchThreshold = 70F,
        MaxFaces = 2
    };

    var response = client.SearchFacesByImage(request);

    Console.WriteLine("Faces matching largest face in image from " + sourceKey);
    foreach (var match in response.FaceMatches)
    {
        Console.WriteLine("FaceId: " + match.Face.FaceId + ", Similarity: " + match.Similarity);
    }
}
// snippet-start:[Rekognition.dotnetv3.SearchFacesMatchingImageExample]
/// <summary>
/// Searches the "MyCollection" collection for faces that match the largest face
/// in an S3-hosted image and prints each match with its similarity score.
/// </summary>
public static async Task Main()
{
    var collectionId = "MyCollection";
    var bucket = "bucket";
    var photo = "input.jpg";

    var rekognitionClient = new AmazonRekognitionClient();

    // Build the request around the photo stored in S3.
    var searchFacesByImageRequest = new SearchFacesByImageRequest()
    {
        CollectionId = collectionId,
        Image = new Image()
        {
            S3Object = new S3Object()
            {
                Bucket = bucket,
                Name = photo,
            },
        },
        FaceMatchThreshold = 70F,
        MaxFaces = 2,
    };

    var searchFacesByImageResponse =
        await rekognitionClient.SearchFacesByImageAsync(searchFacesByImageRequest);

    Console.WriteLine("Faces matching largest face in image from " + photo);
    foreach (var face in searchFacesByImageResponse.FaceMatches)
    {
        Console.WriteLine($"FaceId: {face.Face.FaceId}, Similarity: {face.Similarity}");
    }
}
/// <summary>
/// Initiates the asynchronous execution of the SearchFacesByImage operation.
/// </summary>
/// <param name="request">Container for the necessary parameters to execute the SearchFacesByImage operation.</param>
/// <param name="cancellationToken">A cancellation token that can be used by other objects or threads to receive notice of cancellation.</param>
/// <returns>The task object representing the asynchronous operation.</returns>
public Task<SearchFacesByImageResponse> SearchFacesByImageAsync(SearchFacesByImageRequest request, System.Threading.CancellationToken cancellationToken = default(CancellationToken))
{
    return InvokeAsync<SearchFacesByImageRequest, SearchFacesByImageResponse>(
        request,
        new SearchFacesByImageRequestMarshaller(),
        SearchFacesByImageResponseUnmarshaller.Instance,
        cancellationToken);
}
/// <summary>
/// Searches the AWS Rekognition collection for faces matching the face in the
/// given image file and returns the FaceIds of matches whose ExternalImageId
/// starts with one of the configured date masks and whose similarity meets the
/// configured level.
/// </summary>
/// <param name="faceFileName">Path of the image file to search with.</param>
/// <returns>Distinct matching face GUIDs, or null when there are none (or on any error).</returns>
private List<Guid> SearchOneFace(string faceFileName)
{
    List<Guid> facesData = null;
    try
    {
        Amazon.Rekognition.Model.Image image = new Amazon.Rekognition.Model.Image()
        {
            Bytes = new MemoryStream(System.IO.File.ReadAllBytes(faceFileName))
        };
        SearchFacesByImageRequest searchFacesByImageRequest = new SearchFacesByImageRequest()
        {
            CollectionId = awsCollectionId,
            Image = image,
            FaceMatchThreshold = awsFaceMatchThreshold,
            MaxFaces = 1000
        };
        using (AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(awsAccessKeyId, awsSecretAccessKey, awsRegionEndpoint))
        {
            // GetAwaiter().GetResult() keeps the synchronous contract but surfaces
            // the original exception instead of an AggregateException.
            SearchFacesByImageResponse searchFacesByImageResponse =
                rekognitionClient.SearchFacesByImageAsync(searchFacesByImageRequest).GetAwaiter().GetResult();

            if (searchFacesByImageResponse != null && searchFacesByImageResponse.FaceMatches.Count > 0)
            {
                facesData = new List<Guid>();
                foreach (FaceMatch match in searchFacesByImageResponse.FaceMatches)
                {
                    string externalId = match.Face.ExternalImageId;
                    // ExternalImageId is expected to start with an 8-character date
                    // mask — skip entries too short to carry one (or null ids,
                    // which previously crashed into the silent catch).
                    if (externalId == null || externalId.Length <= 7)
                    {
                        continue;
                    }
                    string dateMask = externalId.Substring(0, 8);
                    if (dates == null || !dates.Contains(dateMask))
                    {
                        continue;
                    }
                    if (match.Similarity < awsSimilarityLevel)
                    {
                        continue;
                    }
                    Guid faceId;
                    if (Guid.TryParse(match.Face.FaceId, out faceId) && !facesData.Contains(faceId))
                    {
                        facesData.Add(faceId);
                    }
                }
            }
        }
    }
    catch (Exception exc)
    {
        // Best-effort contract: a failed search yields null — but record the
        // failure instead of swallowing it silently as the original did.
        System.Diagnostics.Trace.TraceError("SearchOneFace failed for '{0}': {1}", faceFileName, exc);
    }
    // Normalize "no matches" to null to match the documented return contract.
    if (facesData != null && facesData.Count == 0)
    {
        facesData = null;
    }
    return facesData;
}
/// <summary>
/// Runs SearchFacesByImage against the given collection with a zero similarity
/// threshold so that every candidate match (and its probability score) comes back.
/// </summary>
/// <param name="collectionId">Rekognition collection to search.</param>
/// <param name="image">Image to match against the collection.</param>
/// <returns>The raw SearchFacesByImage response.</returns>
private static async Task<SearchFacesByImageResponse> LookupImage(string collectionId, Image image)
{
    var client = new AmazonRekognitionClient(Amazon.RegionEndpoint.USWest2);

    // Threshold 0 => include all probability scores; cap results at 100.
    var request = new SearchFacesByImageRequest()
    {
        CollectionId = collectionId,
        Image = image,
        FaceMatchThreshold = 0F,
        MaxFaces = 100
    };

    return await client.SearchFacesByImageAsync(request);
}
/// <summary>
/// Searches the "MyCollection" collection for faces matching the largest face in
/// the supplied image and prints each match's FaceId and similarity.
/// </summary>
/// <param name="image">Image to match against the collection.</param>
/// <param name="rekognitionClient">Client used to issue the request.</param>
private static void searchFace(Amazon.Rekognition.Model.Image image, AmazonRekognitionClient rekognitionClient)
{
    var searchRequest = new SearchFacesByImageRequest()
    {
        CollectionId = "MyCollection",
        Image = image
    };

    var searchResponse = rekognitionClient.SearchFacesByImage(searchRequest);
    foreach (FaceMatch match in searchResponse.FaceMatches)
    {
        Console.WriteLine("FaceId: " + match.Face.FaceId + ", Similarity: " + match.Similarity);
    }
}
/// <summary>
/// Runs every source image through SearchFacesByImage against the target
/// collection, recording per-image call timing and the raw responses, and
/// returns all face matches found across the data set.
/// </summary>
/// <returns>All FaceMatch results, aggregated over every source image.</returns>
private List<FaceMatch> SearchCollectionForSourceImageFaces()
{
    var matches = new List<FaceMatch>();
    Console.WriteLine("Searching target collection for matching source faces...");

    foreach (var sourceImage in DataSet.SourceImages)
    {
        Console.WriteLine("Attempting to match image {0}.", sourceImage.Key);

        // Serialize the bitmap to JPEG bytes for the Rekognition request.
        var jpegStream = new MemoryStream();
        sourceImage.Value.Save(jpegStream, System.Drawing.Imaging.ImageFormat.Jpeg);

        var request = new SearchFacesByImageRequest()
        {
            Image = new Image() { Bytes = jpegStream },
            CollectionId = CollectionName
        };

        // Time only the service call itself.
        var stopwatch = Stopwatch.StartNew();
        var response = Client.SearchFacesByImage(request);
        stopwatch.Stop();

        TimingResults.Add(new TimingModel("SearchCollectionForSourceImageFaces", sourceImage.Key, stopwatch.ElapsedMilliseconds));
        MatchResults.Add(response);

        if (response.FaceMatches.Count > 0)
        {
            Console.WriteLine("Matching target face found for {0} with a confidence level of {1}.", sourceImage.Key, response.SearchedFaceConfidence);
        }
        else
        {
            Console.WriteLine("No matching target face found for {0}.", sourceImage.Key);
        }

        matches.AddRange(response.FaceMatches);
    }

    Console.WriteLine("{0} out of {1} faces successfully matched.", matches.Count, DataSet.SourceImages.Count);
    return matches;
}
/// <summary>
/// Searches the given Rekognition collection for faces matching the largest face
/// in the supplied image bytes and returns the FaceId of the first match.
/// </summary>
/// <param name="collectionId">Rekognition collection to search.</param>
/// <param name="photo">Raw image bytes (e.g. JPEG/PNG) read from the file system.</param>
/// <returns>FaceId of the first matched face, or "" when no match is found.</returns>
public static string RecogniseFaceMatchFromFileSystem(string collectionId, byte[] photo)
{
    AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(RegionEndpoint.APSouth1);

    // Send the image inline as bytes rather than referencing an S3 object.
    Image image = new Image()
    {
        Bytes = new MemoryStream(photo)
    };

    SearchFacesByImageRequest searchFacesByImageRequest = new SearchFacesByImageRequest()
    {
        CollectionId = collectionId,
        Image = image,
        FaceMatchThreshold = 70F,
        MaxFaces = 2
    };

    // Block once via GetAwaiter().GetResult() so a service failure surfaces as
    // the original exception instead of an AggregateException (was .Result in
    // the loop header).
    SearchFacesByImageResponse searchFacesByImageResponse =
        rekognitionClient.SearchFacesByImageAsync(searchFacesByImageRequest).GetAwaiter().GetResult();

    // BUGFIX: the original concatenated the byte[] argument into the message,
    // which printed "System.Byte[]"; report the byte count instead.
    Console.WriteLine("Faces matching largest face in image of " + photo.Length + " bytes");
    foreach (FaceMatch face in searchFacesByImageResponse.FaceMatches)
    {
        Console.WriteLine("FaceId: " + face.Face.FaceId + ", Similarity: " + face.Similarity);
        // Only the first match is reported, mirroring the original early return.
        return face.Face.FaceId;
    }
    return "";
}
/// <summary>
/// Searches the "PiscoMarketFaces" collection for a face matching the supplied
/// image bytes and returns the matched face's ExternalImageId.
/// </summary>
/// <param name="imageBytes">Raw image bytes to search with.</param>
/// <returns>ExternalImageId of the match, or null when no match is found or on error.</returns>
public async Task<string> Compare(byte[] imageBytes)
{
    try
    {
        String collectionId = "PiscoMarketFaces";

        // SECURITY(review): the decryption key is hard-coded in source, which
        // makes the "encrypted" AWS credentials recoverable by anyone with the
        // binary. Move the key (better: the credentials themselves) to a secure
        // store or use an IAM role.
        var key = "E546C8DF278CD5931069B522E695D4F2";
        var rekognitionClient = new AmazonRekognitionClient(
            EncryptionUtilities.DecryptString(configuration.awsAccessKey, key),
            EncryptionUtilities.DecryptString(configuration.awsAccessSecret, key),
            Amazon.RegionEndpoint.USEast2);

        var image = new Image() { Bytes = new MemoryStream(imageBytes) };

        var searchFacesByImageRequest = new SearchFacesByImageRequest()
        {
            CollectionId = collectionId,
            Image = image,
            FaceMatchThreshold = 70F,
            MaxFaces = 1
        };

        SearchFacesByImageResponse searchFacesByImageResponse =
            await rekognitionClient.SearchFacesByImageAsync(searchFacesByImageRequest);

        // MaxFaces = 1, so at most one match comes back; FirstOrDefault replaces
        // the original Any()+First() double enumeration.
        var faceMatch = searchFacesByImageResponse.FaceMatches.FirstOrDefault();
        return faceMatch?.Face.ExternalImageId;
    }
    catch (Exception ex)
    {
        // Best-effort contract: callers treat null as "no match" — but record
        // the failure instead of swallowing it silently as the original did.
        System.Diagnostics.Trace.TraceError("Compare failed: {0}", ex);
        return null;
    }
}
/// <summary>
/// Searches the "faceCollection" collection for faces matching the largest face
/// in s3://face-identify/BrotherP.jpg and prints each match with its similarity.
/// </summary>
/// <returns>
/// The running search task. BUGFIX: was "async void", whose exceptions are
/// unobservable; returning Task lets callers await and observe failures while
/// remaining source-compatible with fire-and-forget call sites.
/// </returns>
public async Task SearchFace()
{
    String collectionId = "faceCollection";
    String bucket = "face-identify";
    String photo = "BrotherP.jpg";

    // SECURITY(review): hard-coded IAM access key and secret — these are exposed
    // to anyone with the source; revoke them and switch to the default
    // credential chain (environment/profile/role).
    string accessKey = "AKIAST4HFDODRNXMOAPJ";
    string secretKey = "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";
    AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(accessKey, secretKey, Amazon.RegionEndpoint.APSoutheast1);

    // Get an image object from S3 bucket.
    Image image = new Image()
    {
        S3Object = new S3Object()
        {
            Bucket = bucket,
            Name = photo
        }
    };

    SearchFacesByImageRequest searchFacesByImageRequest = new SearchFacesByImageRequest()
    {
        CollectionId = collectionId,
        Image = image,
        FaceMatchThreshold = 70F,
        MaxFaces = 2
    };

    SearchFacesByImageResponse searchFacesByImageResponse =
        await rekognitionClient.SearchFacesByImageAsync(searchFacesByImageRequest);

    Console.WriteLine("Faces matching largest face in image from " + photo);
    foreach (FaceMatch face in searchFacesByImageResponse.FaceMatches)
    {
        Console.WriteLine("FaceId: " + face.Face.FaceId + ", Similarity: " + face.Similarity);
    }
}
/// <summary>
/// Attempts to match the given bitmap against the face collection and reports
/// the outcome through the supplied delegate: the top match's similarity, an
/// error message when recognition fails, or a fixed "no match" string.
/// </summary>
/// <param name="bitmap">Bitmap to convert and match.</param>
/// <param name="my">Callback that receives the result text.</param>
private void searchFacesMatch(Bitmap bitmap, MyDelegate my)
{
    Amazon.Rekognition.Model.Image awsImage = Utils.bitmapToAWSImage(bitmap);

    SearchFacesByImageRequest request = new SearchFacesByImageRequest()
    {
        CollectionId = collectionId,
        Image = awsImage
    };

    SearchFacesByImageResponse response;
    try
    {
        response = rekognitionClient.SearchFacesByImage(request);
    }
    catch (Exception e)
    {
        // Rekognition throws when it cannot process the image; report and bail.
        my(e.Message);
        return;
    }

    if (response.FaceMatches.Count == 0)
    {
        my("nobody nobody but you");
        return;
    }

    FaceMatch top = response.FaceMatches[0];
    String name = top.Face.FaceId;
    my(top.Similarity + "");
}
/// <summary>
/// S3-triggered Lambda: for each uploaded image, searches the face collection
/// for a matching face, then looks up the matching customer in the "Customer"
/// DynamoDB table and updates that customer's StoreLocation based on which
/// camera-feed folder the image arrived in.
/// </summary>
/// <param name="input">S3 event describing the uploaded object(s).</param>
/// <param name="context">Lambda execution context.</param>
/// <returns>A task that completes when every record has been processed.</returns>
public async Task FunctionHandler(S3Event input, ILambdaContext context)
{
    try
    {
        AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient();

        foreach (var record in input.Records)
        {
            // Skip objects that are not a supported image type.
            if (!SupportedImageTypes.Contains(Path.GetExtension(record.S3.Object.Key)))
            {
                Debug.WriteLine($"Object {record.S3.Bucket.Name}:{record.S3.Object.Key} is not a supported image type");
                continue;
            }

            Image image = new Image()
            {
                S3Object = new Amazon.Rekognition.Model.S3Object
                {
                    Bucket = record.S3.Bucket.Name,
                    Name = record.S3.Object.Key
                }
            };

            SearchFacesByImageRequest searchFacesByImageRequest = new SearchFacesByImageRequest()
            {
                CollectionId = FACE_COLLECTION_ID,
                Image = image,
                FaceMatchThreshold = 90F,
                MaxFaces = 1
            };

            // BUGFIX: was .Result inside an async method — await avoids blocking
            // the Lambda thread and AggregateException wrapping.
            SearchFacesByImageResponse searchFacesByImageResponse =
                await rekognitionClient.SearchFacesByImageAsync(searchFacesByImageRequest);

            Debug.WriteLine("Faces matching largest face in image from " + record.S3.Object.Key);
            foreach (FaceMatch match in searchFacesByImageResponse.FaceMatches)
            {
                Debug.WriteLine("FaceId: " + match.Face.FaceId + ", Similarity: " + match.Similarity);

                // SECURITY(review): hard-coded Cognito identity pool id — move it
                // to configuration; prefer the Lambda execution role over Cognito
                // credentials for server-side access to DynamoDB.
                CognitoAWSCredentials credentials = new CognitoAWSCredentials(
                    "us-east-1:6d711ae9-1084-4a71-9ef6-7551ca74ad0b", // Identity pool ID
                    RegionEndpoint.USEast1                            // Region
                    );
                var dynamoDbClient = new AmazonDynamoDBClient(credentials, RegionEndpoint.USEast1);
                Table customersTbl = Table.LoadTable(dynamoDbClient, "Customer");

                GetItemOperationConfig config = new GetItemOperationConfig
                {
                    AttributesToGet = new List<string> { "Id", "CustomerName", "FaceId", "StoreLocation" },
                    ConsistentRead = true
                };

                // NOTE(review): customer ids are assumed to be 1..5 — replace this
                // per-id probe with a query/GSI on FaceId if the table grows.
                for (int i = 1; i <= 5; i++)
                {
                    // BUGFIX: was .Result — await instead of blocking.
                    Document retrievedCustomer = await customersTbl.GetItemAsync(i, config);
                    if (retrievedCustomer["FaceId"].AsString() == match.Face.FaceId)
                    {
                        // The retrieved customer's FaceId matches the face being
                        // searched — we know who is in the store; update location.
                        var customer = new Document();
                        customer["Id"] = Int32.Parse(retrievedCustomer["Id"].AsString());

                        // Location is derived from the top-level camera-feed folder
                        // of the uploaded object's key.
                        string cameraFeedFolder = "CAM-Exit";
                        string[] cameraFeedFolderPath = record.S3.Object.Key.Split("/");
                        if (cameraFeedFolderPath.Length > 0)
                        {
                            cameraFeedFolder = cameraFeedFolderPath[0];
                        }

                        string location;
                        switch (cameraFeedFolder)
                        {
                            case "CAM-Entrance": location = "entrance"; break;
                            case "CAM-Aisle1": location = "aisle1"; break;
                            case "CAM-Aisle2": location = "aisle2"; break;
                            case "CAM-Aisle3": location = "aisle3"; break;
                            case "CAM-Aisle4": location = "aisle4"; break;
                            case "CAM-Checkout": location = "checkout"; break;
                            default: location = "entrance"; break;
                        }
                        customer["StoreLocation"] = location;

                        UpdateItemOperationConfig updateConfig = new UpdateItemOperationConfig
                        {
                            // Get updated item in response.
                            ReturnValues = ReturnValues.AllNewAttributes
                        };
                        // BUGFIX: was .Result — await instead of blocking.
                        Document updatedCustomer = await customersTbl.UpdateItemAsync(customer, updateConfig);
                        Console.WriteLine("UpdateMultipleAttributes: Printing item after updates ...");
                        PrintDocument(updatedCustomer);
                        break;
                    }
                }
            }
        }
        return;
    }
    catch (Exception ex)
    {
        throw new Exception("Deloitte Mart Exception " + ex.Message, ex);
    }
}
/// <summary>
/// Receives an uploaded photo, compares it (AWS Rekognition, collection
/// "GrupoCEI") against the registered face of the given employee and, on a
/// match, records the EPI-sheet validation.
/// </summary>
/// <param name="arquivo">Metadata/content holder for the uploaded file.</param>
/// <param name="UKFichaDeEPI">EPI sheet key (not used directly in this method).</param>
/// <param name="UKEmpregado">Employee key (GUID) to validate against.</param>
/// <param name="UKProduto">Product key (not used directly in this method).</param>
/// <returns>JSON with a success or error message.</returns>
public ActionResult RecoFaceUploadEPI(Arquivo arquivo, string UKFichaDeEPI, string UKEmpregado, string UKProduto)
{
    try
    {
        Guid UKemp = Guid.Parse(UKEmpregado);
        Empregado empregado = EmpregadoBusiness.Consulta.FirstOrDefault(a => string.IsNullOrEmpty(a.UsuarioExclusao) && a.UniqueKey.Equals(UKemp));
        // Expected ExternalImageId in the collection: name without spaces + ".jpg".
        var nome = empregado.Nome.Trim().Replace(" ", "") + ".jpg";
        string Semelhanca = string.Empty;
        String resultado = string.Empty;
        HttpPostedFileBase arquivoPostado = null;
        foreach (string fileInputName in Request.Files)
        {
            arquivoPostado = Request.Files[fileInputName];
            var target = new MemoryStream();
            arquivoPostado.InputStream.CopyTo(target);
            arquivo.Conteudo = target.ToArray();
            arquivo.UsuarioInclusao = CustomAuthorizationProvider.UsuarioAutenticado.Login;
            arquivo.DataInclusao = DateTime.Now;
            arquivo.Extensao = Path.GetExtension(arquivoPostado.FileName);
            arquivo.NomeLocal = arquivoPostado.FileName;
            byte[] inputImageData = arquivo.Conteudo;
            var inputImageStream = new MemoryStream(inputImageData);
            // SECURITY(review): hard-coded IAM access key/secret in source —
            // revoke these credentials and use the default chain (profile/role).
            var rekognitionClient = new AmazonRekognitionClient("AKIAIBLZ7KFAN6XG3NNA", "2nukFOTDN0zv/y2tzeCiLrAHM5TwbFgvEqqZA9zn", RegionEndpoint.USWest2);
            Image image = new Image() { Bytes = inputImageStream };
            SearchFacesByImageRequest searchFacesByImageRequest = new SearchFacesByImageRequest()
            {
                CollectionId = "GrupoCEI",
                Image = image,
                FaceMatchThreshold = 70F,
                MaxFaces = 2
            };
            // contaFace encodes the outcome for the catch block below:
            // 1 = validated; 2 = no match; 3 = name mismatch; 4 = already validated.
            var contaFace = 0;
            try
            {
                SearchFacesByImageResponse searchFacesByImageResponse = rekognitionClient.SearchFacesByImage(searchFacesByImageRequest);
                List<FaceMatch> faceMatches = searchFacesByImageResponse.FaceMatches;
                if (faceMatches.Count == 0)
                {
                    contaFace = 2;
                }
                if (faceMatches.Count > 0)
                {
                    foreach (FaceMatch face in faceMatches)
                    {
                        if (face != null && face.Face.ExternalImageId == nome)
                        {
                            Semelhanca = face.Similarity.ToString();
                            resultado = face.Face.ExternalImageId.ToString();
                            contaFace = 1;
                            // Has this employee already validated this document?
                            var validacao = ValidacoesBusiness.Consulta.FirstOrDefault(a => string.IsNullOrEmpty(a.UsuarioExclusao) && a.Registro.Equals(arquivo.NumRegistro) && a.NomeIndex.Equals(face.Face.ExternalImageId));
                            if (validacao == null)
                            {
                                ValidacaoFichaDeEpi val = new ValidacaoFichaDeEpi()
                                {
                                    UKFichaDeEPI = Convert.ToString(arquivo.UKObjeto),
                                    NomeIndex = face.Face.ExternalImageId,
                                };
                                ValidacaoFichaDeEpiBusiness.Inserir(val);
                            }
                            else
                            {
                                contaFace = 4;
                                throw new Exception("Empregado já validou este documento!");
                            }
                        }
                        else
                        {
                            contaFace = 3;
                            throw new Exception("Empregado com Nome diferente!");
                        }
                    }
                    Extensions.GravaCookie("MensagemSucesso", "Empregado '" + resultado + "' identificado com: '" + Semelhanca + "' de semelhança.", 10);
                }
                else
                {
                    throw new Exception("Empregado não encontrado!");
                }
            }
            catch (Exception)
            {
                // Map the contaFace outcome code to a user-facing message.
                if (contaFace == 2)
                {
                    throw new Exception("Empregado não encontrado!");
                }
                if (contaFace == 3)
                {
                    throw new Exception("A imagem não corresponde com o empregado atual");
                }
                if (contaFace == 4)
                {
                    throw new Exception("Empregado já validou este documento!");
                }
                else
                {
                    throw new Exception("Essa não é uma imagem válida!");
                }
            }
        }
        // BUGFIX: the original tested "Semelhanca != null", which is always true
        // (the variable is initialized to string.Empty), making the "not found"
        // branch unreachable; check whether a similarity was actually recorded.
        if (!string.IsNullOrEmpty(Semelhanca))
        {
            return Json(new { sucesso = "O Empregado '" + resultado + "' foi analisado com êxito." });
        }
        else
        {
            return Json(new { sucesso = "Empregado não encontrado!" });
        }
    }
    catch (Exception ex)
    {
        return Json(new { erro = ex.Message });
    }
}