Code example #1
 // Test fixture constructor: creates the FaceEncodingApi instance under test.
 public FaceEncodingApiTests()
 {
     instance = new FaceEncodingApi();
 }
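For context, here is a minimal sketch of how this fixture might be exercised. It assumes an xUnit test class in which `instance` is a FaceEncodingApi field; the test name and assertion are illustrative and not taken from the project.

 // Hypothetical xUnit test using the fixture above (assumes the xunit package is referenced).
 [Fact]
 public void InstanceTest()
 {
     // Only verifies that the constructor produced a usable client object.
     Assert.IsType<FaceEncodingApi>(instance);
 }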
Code example #2
        private static void Main(string[] args)
        {
            if (args.Length != 2)
            {
                Console.WriteLine($"[Error] {nameof(FaceEncoding)} <url> <image file path>");
                return;
            }

            var url  = args[0];
            var file = args[1];

            if (!File.Exists(file))
            {
                Console.WriteLine($"[Error] '{file}' does not exist");
                return;
            }

            // Clients for the face detection and face encoding endpoints.
            var faceDetectionApi = new FaceDetectionApi(url);
            var faceEncodingApi  = new FaceEncodingApi(url);

            try
            {
                // Send the raw image bytes to the face detection endpoint.
                var image             = new FaceRecognitionDotNet.Client.Model.Image(File.ReadAllBytes(file));
                var detectionResponse = faceDetectionApi.FaceDetectionLocationsPostWithHttpInfo(image);
                if (detectionResponse.StatusCode != System.Net.HttpStatusCode.OK)
                {
                    Console.WriteLine($"[Error] {nameof(FaceDetectionApi)} returns {detectionResponse.StatusCode}");
                    return;
                }

                Console.WriteLine($"[Info] Find {detectionResponse.Data.Count} faces");

                // Load the source image once; each detected face is cropped out of it below.
                using var bitmap = (Bitmap)Image.FromFile(file);

                foreach (var faceArea in detectionResponse.Data)
                {
                    var w = faceArea.Right - faceArea.Left;
                    var h = faceArea.Bottom - faceArea.Top;

                    // Copy the detected face rectangle into its own bitmap.
                    using var cropped = new Bitmap(w, h, bitmap.PixelFormat);
                    using (var g = Graphics.FromImage(cropped))
                        g.DrawImage(bitmap, new Rectangle(0, 0, w, h), new Rectangle(faceArea.Left, faceArea.Top, w, h), GraphicsUnit.Pixel);
                    using (var ms = new MemoryStream())
                    {
                        // Re-encode the cropped face as PNG and send it to the encoding endpoint.
                        cropped.Save(ms, ImageFormat.Png);

                        var croppedImage     = new FaceRecognitionDotNet.Client.Model.Image(ms.ToArray());
                        var encodingResponse = faceEncodingApi.FaceEncodingEncodingPostWithHttpInfo(croppedImage);
                        if (encodingResponse.StatusCode != System.Net.HttpStatusCode.OK)
                        {
                            Console.WriteLine($"[Error] {nameof(FaceEncodingApi)} returns {encodingResponse.StatusCode}");
                            return;
                        }

                        Console.WriteLine($"[Info] Face Encoding has {encodingResponse.Data.Data.Count} length");
                    }
                }
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
            }
        }
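For reference, the program above expects exactly two arguments: the service URL and an image file path. A hypothetical invocation (project name, URL and file name are placeholders, not values from the project) would look like:

        dotnet run --project FaceEncoding -- http://localhost:8080 face.jpg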
Code example #3
        public async Task<string> Register(RegistrationViewModel registrationViewModel, byte[] image)
        {
            // Clients for the face detection, encoding and registration endpoints.
            var faceDetectionApi    = new FaceDetectionApi(this._EndPointUrl);
            var faceEncodingApi     = new FaceEncodingApi(this._EndPointUrl);
            var faceRegistrationApi = new FaceRegistrationApi(this._EndPointUrl);

            try
            {
                // Send the uploaded image bytes to the face detection endpoint; exactly one face is required.
                var target       = new Client.Model.Image(image);
                var detectResult = await faceDetectionApi.FaceDetectionLocationsPostWithHttpInfoAsync(target);

                if (detectResult.StatusCode != System.Net.HttpStatusCode.OK)
                {
                    return "Failed to invoke service";
                }

                if (detectResult.Data.Count == 0)
                {
                    return "Failed to detect face";
                }

                if (detectResult.Data.Count != 1)
                {
                    return "Detected multiple faces";
                }

                var area = detectResult.Data.First();

                await using var memoryStream = new MemoryStream(image);
                using var bitmap             = Image.FromStream(memoryStream);
                var x      = area.Left;
                var y      = area.Top;
                var width  = area.Right - area.Left;
                var height = area.Bottom - area.Top;

                // Crop the detected face area out of the uploaded image.
                using var cropped = new Bitmap(width, height, PixelFormat.Format24bppRgb);
                using var g       = Graphics.FromImage(cropped);
                g.DrawImage(bitmap, new Rectangle(0, 0, width, height), new Rectangle(x, y, width, height), GraphicsUnit.Pixel);

                // Re-encode the cropped face as PNG and request its face encoding.
                await using var croppedMemoryStream = new MemoryStream();
                cropped.Save(croppedMemoryStream, ImageFormat.Png);
                target = new Client.Model.Image(croppedMemoryStream.ToArray());
                var encodingResult = await faceEncodingApi.FaceEncodingEncodingPostWithHttpInfoAsync(target);

                if (encodingResult.StatusCode != System.Net.HttpStatusCode.OK)
                {
                    return "Failed to invoke service";
                }

                // Build the registration payload: demographics, the face encoding and the cropped face image.
                var registration = new Registration
                                   (
                    new Demographics(registrationViewModel.Id,
                                     registrationViewModel.FirstName,
                                     registrationViewModel.LastName,
                                     DateTime.UtcNow),
                    new Encoding(encodingResult.Data.Data),
                    new Client.Model.Image(croppedMemoryStream.ToArray())
                                   );

                var registrationResult = await faceRegistrationApi.FaceRegistrationRegisterPostWithHttpInfoAsync(registration);

                if (registrationResult.StatusCode != System.Net.HttpStatusCode.OK)
                {
                    return "Failed to register";
                }
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
                return e.Message;
            }

            // A null return value indicates success.
            return null;
        }
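To show how the return convention reads at a call site, a hypothetical caller might look like the following. Here `registrationService` stands for an instance of the class that defines Register above, and `viewModel` and `imageBytes` are assumed to come from the registration form; none of these names are taken from the original project.

            // Hypothetical call site: a non-null return value is an error message,
            // while null means the registration succeeded.
            var error = await registrationService.Register(viewModel, imageBytes);
            if (error != null)
            {
                Console.WriteLine($"[Error] {error}");
            }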