public async Task<CompareFacesResponse> CompareAsync(MemoryStream srcImg, MemoryStream trgtImg)
{
    try
    {
        var sourceImg = new Image() { Bytes = srcImg };
        var targetImg = new Image() { Bytes = trgtImg };
        var request = new CompareFacesRequest()
        {
            SimilarityThreshold = similarityThreshold,
            SourceImage = sourceImg,
            TargetImage = targetImg
        };
        return await rekognitionClient.CompareFacesAsync(request);
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
    }

    // Returns null when the service call fails, so callers must check for null.
    return null;
}
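// Usage sketch for CompareAsync above: a minimal example, assuming the class
// exposes the rekognitionClient and similarityThreshold members that the method
// references; the file names are hypothetical placeholders.
using var src = new MemoryStream(await File.ReadAllBytesAsync("source.jpg"));
using var tgt = new MemoryStream(await File.ReadAllBytesAsync("target.jpg"));
CompareFacesResponse response = await CompareAsync(src, tgt);
if (response != null)
{
    Console.WriteLine($"{response.FaceMatches.Count} face match(es) found.");
}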
// snippet-start:[Rekognition.dotnetv3.CompareFacesExample]
public static async Task Main()
{
    float similarityThreshold = 70F;
    string sourceImage = "source.jpg";
    string targetImage = "target.jpg";

    var rekognitionClient = new AmazonRekognitionClient();

    Amazon.Rekognition.Model.Image imageSource = new Amazon.Rekognition.Model.Image();

    try
    {
        using FileStream fs = new FileStream(sourceImage, FileMode.Open, FileAccess.Read);
        byte[] data = new byte[fs.Length];
        fs.Read(data, 0, (int)fs.Length);
        imageSource.Bytes = new MemoryStream(data);
    }
    catch (Exception)
    {
        Console.WriteLine($"Failed to load source image: {sourceImage}");
        return;
    }

    Amazon.Rekognition.Model.Image imageTarget = new Amazon.Rekognition.Model.Image();

    try
    {
        using FileStream fs = new FileStream(targetImage, FileMode.Open, FileAccess.Read);
        byte[] data = new byte[fs.Length];
        fs.Read(data, 0, (int)fs.Length);
        imageTarget.Bytes = new MemoryStream(data);
    }
    catch (Exception ex)
    {
        Console.WriteLine($"Failed to load target image: {targetImage}");
        Console.WriteLine(ex.Message);
        return;
    }

    var compareFacesRequest = new CompareFacesRequest
    {
        SourceImage = imageSource,
        TargetImage = imageTarget,
        SimilarityThreshold = similarityThreshold,
    };

    // Call the CompareFaces operation.
    var compareFacesResponse = await rekognitionClient.CompareFacesAsync(compareFacesRequest);

    // Display the results.
    compareFacesResponse.FaceMatches.ForEach(match =>
    {
        ComparedFace face = match.Face;
        BoundingBox position = face.BoundingBox;
        Console.WriteLine($"Face at {position.Left} {position.Top} matches with {match.Similarity}% confidence.");
    });

    Console.WriteLine($"Found {compareFacesResponse.UnmatchedFaces.Count} face(s) that did not match.");
}
public async Task<bool> AuthenticateUserByFace(byte[] targetImage)
{
    float similarityThreshold = 90F;
    string sourceImage = "https://hackathonimagedump.s3.us-east-2.amazonaws.com/123456.jpeg";

    // Never hard-code credentials; the original snippet embedded live keys, which
    // are redacted here in favor of environment variables.
    AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(
        Environment.GetEnvironmentVariable("AWS_ACCESS_KEY_ID"),
        Environment.GetEnvironmentVariable("AWS_SECRET_ACCESS_KEY"),
        RegionEndpoint.USWest2);

    Amazon.Rekognition.Model.Image imageSource = new Amazon.Rekognition.Model.Image();
    try
    {
        var webClient = new WebClient();
        byte[] imageBytes = webClient.DownloadData(sourceImage);
        imageSource.Bytes = new MemoryStream(imageBytes);
    }
    catch (Exception ex)
    {
        Console.WriteLine($"Failed to download source image: {ex.Message}");
    }

    Amazon.Rekognition.Model.Image imageTarget = new Amazon.Rekognition.Model.Image();
    try
    {
        imageTarget.Bytes = new MemoryStream(targetImage);
    }
    catch (Exception ex)
    {
        Console.WriteLine($"Failed to read target image: {ex.Message}");
    }

    CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
    {
        SourceImage = imageSource,
        TargetImage = imageTarget,
        SimilarityThreshold = similarityThreshold
    };

    // Call the CompareFaces operation.
    CompareFacesResponse compareFacesResponse = await rekognitionClient.CompareFacesAsync(compareFacesRequest);

    // Note: SourceImageFace.Confidence reports how confident Rekognition is that a
    // face was detected in the source image, not how similar the two faces are;
    // the per-match similarity lives in FaceMatches[i].Similarity.
    return compareFacesResponse.HttpStatusCode == HttpStatusCode.OK
        && compareFacesResponse.SourceImageFace.Confidence > 90F;
}
public async Task<bool> IsSimillarFaceOnSourceImage(HubConnection hubConnection)
{
    // Never hard-code credentials; the original snippet embedded live keys, which
    // are redacted here in favor of environment variables.
    AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(
        Environment.GetEnvironmentVariable("AWS_ACCESS_KEY_ID"),
        Environment.GetEnvironmentVariable("AWS_SECRET_ACCESS_KEY"),
        Amazon.RegionEndpoint.EUWest1);

    // sourceImage, targetImage, and similarityThreshold are class-level fields.
    Image imageSource = new Image();
    using (FileStream fs = new FileStream(sourceImage, FileMode.Open, FileAccess.Read))
    {
        byte[] data = new byte[fs.Length];
        fs.Read(data, 0, (int)fs.Length);
        imageSource.Bytes = new MemoryStream(data);
    }

    Image imageTarget = new Image();
    using (FileStream fs = new FileStream(targetImage, FileMode.Open, FileAccess.Read))
    {
        byte[] data = new byte[fs.Length];
        fs.Read(data, 0, (int)fs.Length);
        imageTarget.Bytes = new MemoryStream(data);
    }

    CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
    {
        SourceImage = imageSource,
        TargetImage = imageTarget,
        SimilarityThreshold = similarityThreshold
    };

    try
    {
        CompareFacesResponse compareFacesResponse = await rekognitionClient.CompareFacesAsync(compareFacesRequest);

        foreach (CompareFacesMatch match in compareFacesResponse.FaceMatches)
        {
            ComparedFace face = match.Face;
            BoundingBox position = face.BoundingBox;
            Console.WriteLine($"Face at {position.Left} {position.Top} matches with {match.Similarity}% confidence.");
            await hubConnection.SendAsync("HideWindows");
        }

        Console.WriteLine($"There were {compareFacesResponse.UnmatchedFaces.Count} face(s) that did not match.");
        return true;
    }
    catch (Exception)
    {
        return false;
    }
}
private static async Task<CompareFacesResponse> Compare(Image image1, Image image2)
{
    AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(Amazon.RegionEndpoint.USWest2);

    float similarityThreshold = 0F; // Set to 0 to see all probability scores.

    CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
    {
        SourceImage = image1,
        TargetImage = image2,
        SimilarityThreshold = similarityThreshold
    };

    return await rekognitionClient.CompareFacesAsync(compareFacesRequest);
}
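// Usage sketch for Compare above: a minimal example that builds the two Image
// arguments from S3 objects instead of byte streams. The bucket and key names
// are hypothetical placeholders.
var source = new Image { S3Object = new S3Object { Bucket = "my-bucket", Name = "source.jpg" } };
var target = new Image { S3Object = new S3Object { Bucket = "my-bucket", Name = "target.jpg" } };
CompareFacesResponse result = await Compare(source, target);
foreach (CompareFacesMatch match in result.FaceMatches)
{
    Console.WriteLine($"Similarity: {match.Similarity}%");
}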
public async Task<bool> AuthenticateUserByFace(byte[] targetImage)
{
    try
    {
        float similarityThreshold = 90F;
        CustomerDetails custInfo = JsonConvert.DeserializeObject<CustomerDetails>(TempData["UserInfo"].ToString());
        string sourceImage = custInfo.body.CustomerDetails.ImageUrl;

        AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient(
            Environment.GetEnvironmentVariable("ACCESS_KEY_ID"),
            Environment.GetEnvironmentVariable("SECRET_ACCESS_KEY"),
            RegionEndpoint.USWest2);

        Image imageSource = new Image();
        var webClient = new WebClient();
        byte[] imageBytes = webClient.DownloadData(sourceImage);
        imageSource.Bytes = new MemoryStream(imageBytes);

        Image imageTarget = new Image();
        imageTarget.Bytes = new MemoryStream(targetImage);

        CompareFacesRequest compareFacesRequest = new CompareFacesRequest()
        {
            SourceImage = imageSource,
            TargetImage = imageTarget,
            SimilarityThreshold = similarityThreshold
        };

        // Call the CompareFaces operation.
        CompareFacesResponse compareFacesResponse = await rekognitionClient.CompareFacesAsync(compareFacesRequest);

        // Authenticate only when exactly one face matched.
        return compareFacesResponse.HttpStatusCode == HttpStatusCode.OK
            && compareFacesResponse.FaceMatches.Count == 1;
    }
    catch
    {
        // Rethrow rather than throwing a bare Exception, which would discard the
        // original error and stack trace.
        throw;
    }
}
public async Task<FaceMatchResponse> CompareFacesAsync(string sourceImage, string targetImage)
{
    // Convert the source image into a MemoryStream-backed Image object.
    var imageSource = new Amazon.Rekognition.Model.Image();
    imageSource.Bytes = _serviceUtils.ConvertImageToMemoryStream(sourceImage);

    // Convert the target image into a MemoryStream-backed Image object.
    var imageTarget = new Amazon.Rekognition.Model.Image();
    imageTarget.Bytes = _serviceUtils.ConvertImageToMemoryStream(targetImage);

    // Build the request for AWS Rekognition.
    // SimilarityThreshold sets the minimum similarity level for the comparison.
    var request = new CompareFacesRequest
    {
        SourceImage = imageSource,
        TargetImage = imageTarget,
        SimilarityThreshold = 80f
    };

    // Call the CompareFaces service.
    var response = await _rekognitionClient.CompareFacesAsync(request);

    // Check whether any faces matched.
    var hasMatch = response.FaceMatches.Any();

    // If there was no match, return a not-found result.
    if (!hasMatch)
    {
        return new FaceMatchResponse(hasMatch, null, string.Empty);
    }

    // Using the source image and the match parameters, outline the face found in the image.
    var fileName = _serviceUtils.Drawing(imageSource.Bytes, response.SourceImageFace);

    // Get the similarity percentage of the matched face.
    var similarity = response.FaceMatches.FirstOrDefault().Similarity;

    // Return the match details along with the URL for viewing the image.
    return new FaceMatchResponse(hasMatch, similarity, fileName);
}
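// Usage sketch for the service method above. FaceMatchResponse, _serviceUtils,
// and _rekognitionClient are the snippet's own types and fields; the file paths
// and the HasMatch/Similarity member names are assumptions inferred from the
// constructor call in the method.
FaceMatchResponse match = await CompareFacesAsync("photos/source.jpg", "photos/target.jpg");
Console.WriteLine(match.HasMatch
    ? $"Match found with similarity {match.Similarity}%"
    : "No match found.");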
/// <summary>
/// Compares a user's reference face image in S3 against a target image and
/// reports whether any match exceeds 90% similarity.
/// </summary>
/// <param name="input">Two S3 keys separated by '#': the user folder and the target image name.</param>
/// <param name="context">The Lambda execution context.</param>
/// <returns>True if a face matches with more than 90% similarity.</returns>
public async Task<bool> FunctionHandler(String input, ILambdaContext context)
{
    var rekognitionClient = new AmazonRekognitionClient();
    var array = input.Split(new char[] { '#' }, StringSplitOptions.RemoveEmptyEntries);
    string name = array[0] + "/" + array[0] + ".jpg";

    var response = await rekognitionClient.CompareFacesAsync(new CompareFacesRequest
    {
        SimilarityThreshold = 90,
        SourceImage = new Image
        {
            S3Object = new S3Object { Bucket = "s3rekognition", Name = name }
        },
        TargetImage = new Image
        {
            S3Object = new S3Object { Bucket = "s3rekognition", Name = array[1] }
        }
    });

    // Return true if any match clears the 90% similarity bar.
    foreach (CompareFacesMatch match in response.FaceMatches)
    {
        if (match.Similarity > 90)
        {
            return true;
        }
    }

    return false;
}
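// Invocation sketch: the handler expects "userFolder#targetKey", so an input of
// "alice#uploads/visit.jpg" (hypothetical key names) compares
// s3://s3rekognition/alice/alice.jpg against s3://s3rekognition/uploads/visit.jpg:
// bool sameUser = await FunctionHandler("alice#uploads/visit.jpg", context);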
private static async Task Main(string[] args)
{
    const string AWS_ACCESS_KEY_ID = "AWS_ACCESS_KEY_ID";
    const string AWS_SECRET_ACCESS_KEY = "AWS_SECRET_ACCESS_KEY";

    Console.WriteLine("Hello World!");

    var self = await File.ReadAllBytesAsync("assets\\self.jpg");
    var front = await File.ReadAllBytesAsync("assets\\front.png");
    var back = await File.ReadAllBytesAsync("assets\\back.png");

    var command = new AnalizeDocumentCommand { Self = self, Back = back, Front = front };

    var region = RegionEndpoint.USEast1;
    var client = new AmazonRekognitionClient(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, region);

    #region Check whether the image is a document

    using (var stream = new MemoryStream(command.Back))
    {
        var request = new DetectLabelsRequest { Image = new Image { Bytes = stream } };
        var response = await client.DetectLabelsAsync(request);
        var labels = response.Labels;

        foreach (var label in labels)
        {
            var accuracy = Accuracy.GetAccuracy(label.Confidence);
            if (DocumentTypes.IsValidDocument(label.Name))
            {
                if (accuracy.IsLow)
                {
                    Console.WriteLine("This is not a document");
                }

                if (accuracy.IsMedium)
                {
                    Console.WriteLine("This might be a document");
                }

                if (accuracy.IsHigh)
                {
                    Console.WriteLine("This is very likely a document");
                }

                break;
            }
        }
    }

    #endregion

    #region Compare the document with the selfie

    using (var source = new MemoryStream(command.Self))
    using (var target = new MemoryStream(command.Front))
    {
        var request = new CompareFacesRequest
        {
            SourceImage = new Image { Bytes = source },
            TargetImage = new Image { Bytes = target }
        };
        var response = await client.CompareFacesAsync(request);
        var faces = response.FaceMatches;

        if (faces.Count != 1)
        {
            Console.WriteLine("Inconsistent result");
        }

        var accuracy = Accuracy.GetAccuracy(faces.First().Similarity);
        if (accuracy.IsLow)
        {
            Console.WriteLine("This document does not belong to the same person");
        }

        if (accuracy.IsMedium)
        {
            Console.WriteLine("This document might belong to the same person");
        }

        if (accuracy.IsHigh)
        {
            Console.WriteLine("This document very likely belongs to the same person");
        }
    }

    #endregion

    #region Check whether the document belongs to a valid holder

    using (var stream = new MemoryStream(command.Back))
    {
        var request = new DetectTextRequest { Image = new Image { Bytes = stream } };
        var response = await client.DetectTextAsync(request);
        var texts = response.TextDetections;

        foreach (var text in texts)
        {
            var accuracy = Accuracy.GetAccuracy(text.Confidence);
            if ("CPF".Equals(text.DetectedText, StringComparison.InvariantCultureIgnoreCase))
            {
                if (accuracy.IsLow)
                {
                    Console.WriteLine("Does not contain a CPF number");
                }

                if (accuracy.IsMedium)
                {
                    Console.WriteLine("Might contain a CPF number");
                }

                if (accuracy.IsHigh)
                {
                    Console.WriteLine("Very likely contains a CPF number");
                }

                break;
            }
        }
    }

    #endregion

    Console.WriteLine("That's all folks!");
}
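// The Accuracy helper used throughout the example above is not shown in the
// snippet (nor is DocumentTypes). A minimal sketch of what it might look like;
// the 50/80 confidence thresholds are assumed values, not taken from the source.
public sealed class Accuracy
{
    public bool IsLow { get; private set; }
    public bool IsMedium { get; private set; }
    public bool IsHigh { get; private set; }

    public static Accuracy GetAccuracy(float confidence) => new Accuracy
    {
        IsLow = confidence < 50,
        IsMedium = confidence >= 50 && confidence < 80,
        IsHigh = confidence >= 80
    };
}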
public static async Task<string> FunctionHandler(String photo)
{
    String bucket = "moodanalysis";
    string result = "";

    AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient();

    // Compare the user's reference photo against the submitted photo.
    CompareFacesRequest CFR = new CompareFacesRequest()
    {
        // SimilarityThreshold = 50,
        SourceImage = new Image()
        {
            S3Object = new S3Object() { Name = "referencePhoto.jpg", Bucket = bucket },
        },
        TargetImage = new Image()
        {
            S3Object = new S3Object() { Name = photo, Bucket = bucket },
        },
    };

    CompareFacesResponse compareFacesResponse = await rekognitionClient.CompareFacesAsync(CFR);

    if (compareFacesResponse.FaceMatches.Count == 0)
    {
        return "";
    }

    // Find the match with the highest similarity and remember its bounding box.
    var bestMatch = compareFacesResponse.FaceMatches[0];
    float bestMatchResult = bestMatch.Similarity;
    BoundingBox bestBoundingBox = bestMatch.Face.BoundingBox;

    foreach (var faceMatch in compareFacesResponse.FaceMatches)
    {
        if (bestMatchResult < faceMatch.Similarity)
        {
            bestMatch = faceMatch;
            bestBoundingBox = faceMatch.Face.BoundingBox;
            bestMatchResult = faceMatch.Similarity;
        }
    }

    // Detect the emotions of all faces in the photo.
    DetectFacesRequest detectFacesRequest = new DetectFacesRequest()
    {
        Image = new Image()
        {
            S3Object = new S3Object() { Name = photo, Bucket = bucket },
        },
        Attributes = new List<String>() { "ALL" }
    };

    DetectFacesResponse detectFacesResponse = await rekognitionClient.DetectFacesAsync(detectFacesRequest);

    foreach (FaceDetail face in detectFacesResponse.FaceDetails)
    {
        // Match the detected face to the best CompareFaces match by bounding box.
        if (face.BoundingBox.Height == bestBoundingBox.Height &&
            face.BoundingBox.Left == bestBoundingBox.Left &&
            face.BoundingBox.Top == bestBoundingBox.Top &&
            face.BoundingBox.Width == bestBoundingBox.Width)
        {
            // Keep only the emotions that pass the confidence filter.
            FilterEmotions filter = delegate(FaceDetail faceFilter, ConfidenceFilterDelegate confFilter)
            {
                return faceFilter.Emotions.FindAll(n => confFilter(n)).ToList();
            };
            var emotQuery = filter(face, IsLowConfidence);

            // Collect the emotion types as a comma-separated list.
            foreach (Emotion emot in emotQuery)
            {
                result += emot.Type + ",";
            }

            break;
        }
    }

    // Trim the trailing comma.
    if (result.Length != 0)
    {
        result = result.Substring(0, result.Length - 1);
    }

    return result;
}
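// The FilterEmotions and ConfidenceFilterDelegate delegates and the
// IsLowConfidence predicate used above are not shown in the snippet. A minimal
// sketch of declarations consistent with how they are used; the 10% confidence
// cutoff is an assumption, not taken from the source.
delegate List<Emotion> FilterEmotions(FaceDetail face, ConfidenceFilterDelegate confidenceFilter);
delegate bool ConfidenceFilterDelegate(Emotion emotion);

static bool IsLowConfidence(Emotion emotion) => emotion.Confidence > 10;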