/// <summary>
/// Lists all Rekognition stream processors in the given region, paging through
/// results and emitting each processor via <c>AddObject</c>.
/// </summary>
/// <param name="creds">AWS credentials used to construct the client.</param>
/// <param name="region">Region endpoint to call.</param>
/// <param name="maxItems">Maximum number of items to request per page.</param>
public override void Invoke(AWSCredentials creds, RegionEndpoint region, int maxItems)
{
    AmazonRekognitionConfig config = new AmazonRekognitionConfig();
    config.RegionEndpoint = region;
    ConfigureClient(config);

    // FIX: the client is IDisposable but was never disposed, leaking the
    // underlying HTTP handler on every invocation.
    using (AmazonRekognitionClient client = new AmazonRekognitionClient(creds, config))
    {
        ListStreamProcessorsResponse resp = new ListStreamProcessorsResponse();
        do
        {
            // Seed NextToken from the previous page (null on the first pass).
            ListStreamProcessorsRequest req = new ListStreamProcessorsRequest
            {
                NextToken = resp.NextToken,
                MaxResults = maxItems
            };

            resp = client.ListStreamProcessors(req);
            CheckError(resp.HttpStatusCode, "200");

            foreach (var obj in resp.StreamProcessors)
            {
                AddObject(obj);
            }
        } while (!string.IsNullOrEmpty(resp.NextToken));
    }
}
/// <summary>
/// Ensures the target Rekognition collection exists, then indexes every
/// matching image file in the configured directory into it, writing one
/// "FileName,FaceID" CSV row per file to stdout.
/// </summary>
public async Task ExecuteAsync()
{
    var awsCredentials = new BasicAWSCredentials(this.options.AccessKey, this.options.SecretKey);
    var rekognitionConfig = new AmazonRekognitionConfig
    {
        RegionEndpoint = this.options.Region,
        AllowAutoRedirect = true,
        DisableLogging = true,
        MaxConnectionsPerServer = null,
        LogMetrics = false,
        LogResponse = false,
        UseDualstackEndpoint = false,
    };
    rekognitionConfig.Validate();

    using var rekognition = new AmazonRekognitionClient(awsCredentials, rekognitionConfig);

    // Create the collection only when a describe call reports it missing.
    try
    {
        await rekognition.DescribeCollectionAsync(
            new DescribeCollectionRequest { CollectionId = this.options.CollectionID }).ConfigureAwait(false);
    }
    catch (ResourceNotFoundException)
    {
        await rekognition.CreateCollectionAsync(
            new CreateCollectionRequest { CollectionId = this.options.CollectionID }).ConfigureAwait(false);
    }

    var imageFiles = this.options.Directory.EnumerateFiles(this.options.Pattern, enumerationOptions);

    // Output CSV data:
    Console.WriteLine("FileName,FaceID");
    foreach (FileInfo imageFile in imageFiles)
    {
        using var payload = new DecoyMemoryStream(imageFile.OpenRead(), leaveOpen: false);

        var response = await rekognition.IndexFacesAsync(new IndexFacesRequest
        {
            CollectionId = this.options.CollectionID,
            DetectionAttributes = detectionAttributes,
            MaxFaces = 1,
            QualityFilter = QualityFilter.AUTO,
            Image = new Image { Bytes = payload }
        }).ConfigureAwait(false);

        bool faceFound = response.FaceRecords != null && response.FaceRecords.Count > 0;
        Console.WriteLine("{0},{1}", imageFile.Name,
            faceFound ? response.FaceRecords[0].Face.FaceId : "NotDetected");
    }
}
/// <summary>
/// Detects faces in an S3-hosted photo using Amazon Rekognition.
/// </summary>
/// <param name="dfp">Carries the S3 bucket name, photo key, and region endpoint.</param>
/// <returns>The DetectFaces response, or null if the call failed.</returns>
public DetectFacesResponse DetectFaces(DetectFaceParams dfp)
{
    DetectFacesResponse resp = null;
    var conf = new AmazonRekognitionConfig() { RegionEndpoint = dfp.RegEndpoint };

    // NOTE(review): recClient appears to be a field; it is reassigned and
    // disposed here, so it is stale after this call returns — confirm no
    // other member uses it afterwards.
    using (recClient = new AmazonRekognitionClient(awsAccessKeyId, awsSecretAccessKey, conf))
    {
        DetectFacesRequest detectFacesRequest = new DetectFacesRequest()
        {
            Image = new Image()
            {
                S3Object = new S3Object()
                {
                    Name = dfp.PhotoName,
                    Bucket = dfp.BucketName
                },
            },
            // Attributes can be "ALL" or "DEFAULT".
            // "DEFAULT": BoundingBox, Confidence, Landmarks, Pose, and Quality.
            // "ALL": See https://docs.aws.amazon.com/sdkfornet/v3/apidocs/items/Rekognition/TFaceDetail.html
            Attributes = new List<String>() { "ALL" }
        };

        try
        {
            resp = recClient.DetectFaces(detectFacesRequest);
            if (resp == null)
            {
                // Defensive check; the SDK is not expected to return null here.
                throw new Exception("AmazonRekognitionClient DetectFaces method call return null.");
            }
        }
        catch (Exception e)
        {
            // FIX: log the full exception (type + stack trace), not just the
            // message, so failures are diagnosable. Still return null to
            // preserve the best-effort contract callers depend on.
            // (Removed a large block of commented-out demo code that printed
            // face details — dead code.)
            Console.WriteLine(e);
        }
    }

    return resp;
}
/// <summary>
/// Builds an Amazon Rekognition client for the supplied credentials and
/// region, applying cmdlet-level config population and customization, and
/// wiring up the request/response event handlers.
/// </summary>
/// <param name="credentials">Credentials the client will authenticate with.</param>
/// <param name="region">Region endpoint the client will target.</param>
/// <returns>The configured Rekognition service client.</returns>
protected IAmazonRekognition CreateClient(AWSCredentials credentials, RegionEndpoint region)
{
    var serviceConfig = new AmazonRekognitionConfig { RegionEndpoint = region };
    Amazon.PowerShell.Utils.Common.PopulateConfig(this, serviceConfig);
    this.CustomizeClientConfig(serviceConfig);

    var serviceClient = new AmazonRekognitionClient(credentials, serviceConfig);
    serviceClient.BeforeRequestEvent += RequestEventHandler;
    serviceClient.AfterResponseEvent += ResponseEventHandler;
    return serviceClient;
}
/// <summary>
/// Searches the configured Rekognition collection for a face match for each
/// image file in the target directory, writing one CSV row per file with the
/// file name, size, pixel dimensions, call latency, face id, and similarity.
/// </summary>
public async Task ExecuteAsync()
{
    var awsCredentials = new BasicAWSCredentials(this.options.AccessKey, this.options.SecretKey);
    var rekognitionConfig = new AmazonRekognitionConfig
    {
        RegionEndpoint = this.options.Region,
        AllowAutoRedirect = true,
        DisableLogging = true,
        MaxConnectionsPerServer = null,
        LogMetrics = false,
        LogResponse = false,
        UseDualstackEndpoint = false,
    };
    rekognitionConfig.Validate();

    using var rekognition = new AmazonRekognitionClient(awsCredentials, rekognitionConfig);

    var imageFiles = this.options.Directory.EnumerateFiles(this.options.Pattern, enumerationOptions);

    // CSV column headers:
    Console.WriteLine("FileName,FileSize,Dimensions,TimeTaken,FaceID,Similarity");

    var timer = new Stopwatch();
    foreach (FileInfo imageFile in imageFiles)
    {
        using var payload = new DecoyMemoryStream(imageFile.OpenRead(), leaveOpen: false);

        // Probe the pixel dimensions, then rewind so the same stream can be
        // uploaded to Rekognition.
        string dimensions;
        using (var picture = System.Drawing.Image.FromStream(payload, true, true))
        {
            dimensions = $"{picture.Width}x{picture.Height}";
        }
        payload.Position = 0L;

        timer.Restart();
        var response = await rekognition.SearchFacesByImageAsync(new SearchFacesByImageRequest
        {
            CollectionId = this.options.CollectionID,
            FaceMatchThreshold = 90f,
            QualityFilter = QualityFilter.AUTO,
            MaxFaces = 1,
            Image = new Image { Bytes = payload }
        }).ConfigureAwait(false);
        timer.Stop();

        // CSV values:
        Console.Write("{0},{1},{2},{3},", imageFile.Name, imageFile.Length, dimensions, timer.ElapsedMilliseconds);
        if (response.FaceMatches != null && response.FaceMatches.Count > 0)
        {
            var bestMatch = response.FaceMatches[0];
            Console.WriteLine("{0},{1}", bestMatch.Face.FaceId, bestMatch.Similarity);
        }
        else
        {
            Console.WriteLine("null,0");
        }
    }
}