Example #1
        static async Task Main(string[] args)
        {
            if (args.Length != 1)
            {
                Console.WriteLine(@"Usage: FaceDetection C:\path\to\image.jpg");
                return;
            }

            string imageFilePath = args[0];

            Console.WriteLine($"Performing face detection on {imageFilePath}");
            Console.WriteLine();

            if (!File.Exists(imageFilePath))
            {
                Console.WriteLine("File does not exist");
                return;
            }

            try
            {
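                // Send the image to the face detection service and pretty-print the JSON it returns.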
                string json = await FaceDetectionApi.MakeAnalysisRequest(imageFilePath);

                Console.WriteLine(json.PrettyPrint());
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }

            Console.WriteLine();
            Console.WriteLine("=== Press ENTER to Exit ===");
            Console.ReadLine();
        }
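The MakeAnalysisRequest helper and the PrettyPrint extension used in Example #1 are not shown in the snippet. A minimal sketch of what they might look like, assuming the service accepts the raw image bytes as a binary POST and returns a JSON body; the endpoint URL is a placeholder, and this static helper is distinct from the generated FaceDetectionApi client used in the later examples:

using System.IO;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text.Json;
using System.Threading.Tasks;

static class FaceDetectionApi
{
    // Placeholder endpoint; substitute the real service URL.
    private const string Endpoint = "http://localhost:5000/api/facedetection/locations";

    private static readonly HttpClient Client = new HttpClient();

    public static async Task<string> MakeAnalysisRequest(string imageFilePath)
    {
        // POST the raw image bytes and return the response body as a string.
        using var content = new ByteArrayContent(await File.ReadAllBytesAsync(imageFilePath));
        content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");

        using var response = await Client.PostAsync(Endpoint, content);
        response.EnsureSuccessStatusCode();
        return await response.Content.ReadAsStringAsync();
    }
}

static class JsonExtensions
{
    public static string PrettyPrint(this string json)
    {
        // Re-serialize the JSON with indentation via System.Text.Json.
        using var doc = JsonDocument.Parse(json);
        return JsonSerializer.Serialize(doc.RootElement, new JsonSerializerOptions { WriteIndented = true });
    }
}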
Example #2
        public IEnumerable<DetectAreaModel> Locations(byte[] image)
        {
            var results = new List<DetectAreaModel>();

            var api = new FaceDetectionApi(this._EndPointUrl);

            try
            {
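                // Wrap the raw bytes in the client's Image model and post them to the face detection endpoint.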
                var target = new Client.Model.Image(image);
                var result = api.FaceDetectionLocationsPostWithHttpInfo(target);
                if (result.StatusCode != System.Net.HttpStatusCode.OK)
                {
                    return null;
                }

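                // Convert each returned rectangle into a DetectAreaModel (left, top, width, height).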
                results.AddRange(result.Data.Select(area => new DetectAreaModel(area.Left,
                                                                                area.Top,
                                                                                area.Right - area.Left,
                                                                                area.Bottom - area.Top,
                                                                                0)));
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
            }

            return results;
        }
Example #3
        private static void Main(string[] args)
        {
            if (args.Length != 2)
            {
                Console.WriteLine("[Error] FaceDetectionClient <url> <image file path>");
                return;
            }

            var url  = args[0];
            var file = args[1];

            if (!File.Exists(file))
            {
                Console.WriteLine($"[Error] '{file}' does not exist");
                return;
            }

            var api = new FaceDetectionApi(url);

            try
            {
                var image  = new Server.Model.Image(File.ReadAllBytes(file));
                var result = api.ApiFaceDetectionLocationsPostWithHttpInfo(image);
                if (result.StatusCode != System.Net.HttpStatusCode.OK)
                {
                    Console.WriteLine($"[Error] API returns {result.StatusCode}");
                    return;
                }

                Console.WriteLine($"[Info] Find {result.Data.Count} faces");

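                // Draw a red rectangle around each detected face and save the annotated copy as result.jpg.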
                using (var ms = new MemoryStream(image.Data))
                    using (var bitmap = (Bitmap)System.Drawing.Image.FromStream(ms))
                        using (var g = Graphics.FromImage(bitmap))
                            using (var pen = new Pen(Color.Red, 2))
                            {
                                foreach (var area in result.Data)
                                {
                                    var x = area.Left;
                                    var y = area.Top;
                                    var w = area.Right - x;
                                    var h = area.Bottom - y;
                                    g.DrawRectangle(pen, x, y, w, h);
                                }

                                bitmap.Save("result.jpg");
                            }
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
            }
        }
Example #4
        private static void Main(string[] args)
        {
            if (args.Length != 2)
            {
                Console.WriteLine($"[Error] {nameof(FaceEncoding)} <url> <image file path>");
                return;
            }

            var url  = args[0];
            var file = args[1];

            if (!File.Exists(file))
            {
                Console.WriteLine($"[Error] '{file}' does not exist");
                return;
            }

            var faceDetectionApi = new FaceDetectionApi(url);
            var faceEncodingApi  = new FaceEncodingApi(url);

            try
            {
                var image             = new FaceRecognitionDotNet.Client.Model.Image(File.ReadAllBytes(file));
                var detectionResponse = faceDetectionApi.FaceDetectionLocationsPostWithHttpInfo(image);
                if (detectionResponse.StatusCode != System.Net.HttpStatusCode.OK)
                {
                    Console.WriteLine($"[Error] {nameof(FaceDetectionApi)} returns {detectionResponse.StatusCode}");
                    return;
                }

                Console.WriteLine($"[Info] Find {detectionResponse.Data.Count} faces");

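                // Crop each detected face out of the original image and request its encoding.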
                foreach (var faceArea in detectionResponse.Data)
                {
                    using var bitmap = (Bitmap)Image.FromFile(file);
                    var w = faceArea.Right - faceArea.Left;
                    var h = faceArea.Bottom - faceArea.Top;
                    using var cropped = new Bitmap(w, h, bitmap.PixelFormat);
                    using (var g = Graphics.FromImage(cropped))
                        g.DrawImage(bitmap, new Rectangle(0, 0, w, h), new Rectangle(faceArea.Left, faceArea.Top, w, h), GraphicsUnit.Pixel);
                    using (var ms = new MemoryStream())
                    {
                        cropped.Save(ms, ImageFormat.Png);

                        var croppedImage     = new FaceRecognitionDotNet.Client.Model.Image(ms.ToArray());
                        var encodingResponse = faceEncodingApi.FaceEncodingEncodingPostWithHttpInfo(croppedImage);
                        if (encodingResponse.StatusCode != System.Net.HttpStatusCode.OK)
                        {
                            Console.WriteLine($"[Error] {nameof(FaceEncodingApi)} returns {encodingResponse.StatusCode}");
                            return;
                        }

                        Console.WriteLine($"[Info] Face Encoding has {encodingResponse.Data.Data.Count} length");
                    }
                }
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
            }
        }
Example #5
        public FaceDetectionApiTests()
        {
            instance = new FaceDetectionApi();
        }
Example #6
        public async Task<string> Register(RegistrationViewModel registrationViewModel, byte[] image)
        {
            var faceDetectionApi    = new FaceDetectionApi(this._EndPointUrl);
            var faceEncodingApi     = new FaceEncodingApi(this._EndPointUrl);
            var faceRegistrationApi = new FaceRegistrationApi(this._EndPointUrl);

            try
            {
                var target       = new Client.Model.Image(image);
                var detectResult = await faceDetectionApi.FaceDetectionLocationsPostWithHttpInfoAsync(target);

                if (detectResult.StatusCode != System.Net.HttpStatusCode.OK)
                {
                    return("Failed to invoke service");
                }

                if (detectResult.Data.Count == 0)
                {
                    return("Failed to detect face");
                }

                if (detectResult.Data.Count != 1)
                {
                    return("Detect multiple faces");
                }

                var area = detectResult.Data.First();

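                // Crop the detected face region out of the uploaded image.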
                await using var memoryStream = new MemoryStream(image);
                using var bitmap             = Image.FromStream(memoryStream);
                var x      = area.Left;
                var y      = area.Top;
                var width  = area.Right - area.Left;
                var height = area.Bottom - area.Top;
                using var cropped = new Bitmap(width, height, PixelFormat.Format24bppRgb);
                using var g       = Graphics.FromImage(cropped);
                g.DrawImage(bitmap, new Rectangle(0, 0, width, height), new Rectangle(x, y, width, height), GraphicsUnit.Pixel);

                await using var croppedMemoryStream = new MemoryStream();
                cropped.Save(croppedMemoryStream, ImageFormat.Png);
                target = new Client.Model.Image(croppedMemoryStream.ToArray());
                var encodingResult = await faceEncodingApi.FaceEncodingEncodingPostWithHttpInfoAsync(target);

                if (encodingResult.StatusCode != System.Net.HttpStatusCode.OK)
                {
                    return("Failed to invoke service");
                }

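                // Bundle the demographics, the face encoding, and the cropped face image into a single registration request.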
                var registration = new Registration(
                    new Demographics(registrationViewModel.Id,
                                     registrationViewModel.FirstName,
                                     registrationViewModel.LastName,
                                     DateTime.UtcNow),
                    new Encoding(encodingResult.Data.Data),
                    new Client.Model.Image(croppedMemoryStream.ToArray()));

                var registrationResult = await faceRegistrationApi.FaceRegistrationRegisterPostWithHttpInfoAsync(registration);

                if (registrationResult.StatusCode != System.Net.HttpStatusCode.OK)
                {
                    return("Failed to register");
                }
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
            }

            return null;
        }