Example #1
 public static void ImageToFace()
 {
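     // Grab a screenshot, convert it to a pixel format the FaceDetector
     // supports, and draw a red rectangle around each detected face.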
     //Console.WriteLine("Detecting face..");
     Task.Run(async () =>
     {
         var faceDetector   = await FaceDetector.CreateAsync();
         var screenBitmap   = GetBitmapFromScreen();
         var softwareBitmap = await GetSoftwareBitmapFromBitmap(screenBitmap);
         if (!FaceDetector.IsBitmapPixelFormatSupported(softwareBitmap.BitmapPixelFormat))
         {
             //Console.WriteLine("Converting to supported bitmap pixel format..");
             //Console.WriteLine("srcBitmap Width={0}, Height={1}", screenBitmap.Width, screenBitmap.Height);
             //Console.WriteLine("dstBitmap Width={0}, Height={1}", softwareBitmap.PixelWidth, softwareBitmap.PixelHeight);
             softwareBitmap = SoftwareBitmap.Convert(softwareBitmap, FaceDetector.GetSupportedBitmapPixelFormats().First());
             //Console.WriteLine("Converted successfully");
         }
         //Console.WriteLine(screenBitmap.PixelFormat);
         //Console.WriteLine(softwareBitmap.BitmapPixelFormat);
         screenBitmap = await GetBitmapFromSoftwareBitmap(softwareBitmap);
         //Console.WriteLine(screenBitmap.PixelFormat);
         //Console.WriteLine(softwareBitmap.BitmapPixelFormat);
         using (var g = Graphics.FromImage(screenBitmap))
         {
             var detectedFaces = await faceDetector.DetectFacesAsync(softwareBitmap);
             //Console.WriteLine("Detected faces: {0}", detectedFaces.Count);
             foreach (var detectedFace in detectedFaces)
             {
                 var facebox = detectedFace.FaceBox;
                 g.DrawRectangle(Pens.Red, new Rectangle((int)facebox.X, (int)facebox.Y, (int)facebox.Width, (int)facebox.Height));
                 //Console.WriteLine("Face at X={0}, Y={1}, Width={2}, Height={3}", facebox.X, facebox.Y, facebox.Width, facebox.Height);
             }
         }
         //screenBitmap.Save("screenbitmap" + DateTime.Now.Ticks + ".png", ImageFormat.Png);
     }).Wait();
 }
Example #2
        internal void SetSample(SoftwareBitmap input)
        {
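            // Cache a single frame for face detection: lazily pick a pixel
            // format the FaceDetector supports, then copy the input under a
            // lock so the detection thread sees a stable sample.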
            if (!Activated)
            {
                return;
            }

            if (Sample == null)
            {
                lock (SampleLock)
                {
                    if (!SupportedBitmapPixelFormat.HasValue)
                    {
                        if (FaceDetector.IsBitmapPixelFormatSupported(input.BitmapPixelFormat))
                        {
                            SupportedBitmapPixelFormat = input.BitmapPixelFormat;
                        }
                        else
                        {
                            SupportedBitmapPixelFormat = FaceDetector.GetSupportedBitmapPixelFormats().First();
                        }
                    }

                    Sample = SoftwareBitmap.Copy(input);
                }
            }
        }
Example #3
        public async Task<RecogniseResult> Recognise(Stream fileStream)
        {
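            // Decode the image, convert it to a supported pixel format,
            // detect faces, and crop the first face into a BMP stream.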
            var randomAccessStream = fileStream.AsRandomAccessStream();

            var bitmapDecoder = await BitmapDecoder.CreateAsync(randomAccessStream);

            var rawBitmap = await bitmapDecoder.GetSoftwareBitmapAsync();

            var supportedBitmapFormats = FaceDetector.GetSupportedBitmapPixelFormats();
            var supportedFormatBitmap  = SoftwareBitmap.Convert(rawBitmap, supportedBitmapFormats.First());

            var faceDetector = await FaceDetector.CreateAsync();

            var faces = await faceDetector.DetectFacesAsync(supportedFormatBitmap);

            var result = new RecogniseResult();

            if (faces.Any())
            {
                result.Faces = faces.Count();

                var memoryStream = new InMemoryRandomAccessStream();

                var bitmapEncoder = await BitmapEncoder.CreateAsync(BitmapEncoder.BmpEncoderId, memoryStream);

                bitmapEncoder.SetSoftwareBitmap(rawBitmap);
                bitmapEncoder.BitmapTransform.Bounds = faces.First().FaceBox;

                await bitmapEncoder.FlushAsync();

                memoryStream.Seek(0); // rewind so the caller reads the encoded image from the start
                result.FirstFace = memoryStream.AsStream();
            }

            return result;
        }
Example #4
        private async Task InitializeFaceDetection()
        {
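            // Check IsSupported before creating the detector/tracker and
            // remember the first pixel format each one accepts.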
            if (FaceDetector.IsSupported)
            {
                if (_faceDetector == null)
                {
                    _faceDetector = await FaceDetector.CreateAsync();

                    _faceDectorSupportedPixelFormat = FaceDetector.GetSupportedBitmapPixelFormats().FirstOrDefault();
                }
            }
            else
            {
                Debug.WriteLine("Face detection is not supported");
            }

            if (FaceTracker.IsSupported)
            {
                if (_faceTracker == null)
                {
                    _faceTracker = await FaceTracker.CreateAsync();

                    _faceTrackerSupportedPixelFormat = FaceTracker.GetSupportedBitmapPixelFormats().FirstOrDefault();
                }
            }
            else
            {
                Debug.WriteLine("Face tracking is not suppoted");
            }
        }
Example #5
        private async Task InitializeFaceDetection()
        {
            if (FaceDetector.IsSupported)
            {
                if (faceDetector == null)
                {
                    faceDetector = await FaceDetector.CreateAsync();

                    faceDetectorSupportedPixelFormat = FaceDetector.GetSupportedBitmapPixelFormats().FirstOrDefault();
                }
            }
            else
            {
                Debug.WriteLine("Warning. FaceDetector is not supported on this device");
            }

            if (FaceTracker.IsSupported)
            {
                if (faceTracker == null)
                {
                    faceTracker = await FaceTracker.CreateAsync();

                    faceTrackerSupportedPixelFormat = FaceTracker.GetSupportedBitmapPixelFormats().FirstOrDefault();
                }
            }
            else
            {
                Debug.WriteLine("Warning. FaceTracking is not supported on this device");
            }
        }
Example #6
        void InitialiseVideoFrameFromDetectorFormats()
        {
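            // Preallocate a reusable VideoFrame in the detector's preferred
            // pixel format, sized to match the preview video.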
            var bitmapFormats = FaceDetector.GetSupportedBitmapPixelFormats();

            this.videoFrame = new VideoFrame(
                bitmapFormats.First(),
                (int)this.previewVideoSize.Width,
                (int)this.previewVideoSize.Height);
        }
Example #7
        private async Task InitializeCamera()
        {
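            // Start the camera preview, then poll preview frames in a loop,
            // run local face detection, and hand single-face frames to the
            // remote face service to identify or enroll the user.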
            _requestStopCancellationToken = new CancellationTokenSource();
            _captureElement = new CaptureElement();
            var videoCaptureDevices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);

            var camera = videoCaptureDevices.FirstOrDefault();
            MediaCaptureInitializationSettings initialisationSettings = new MediaCaptureInitializationSettings()
            {
                StreamingCaptureMode = StreamingCaptureMode.Video,
                VideoDeviceId        = camera.Id
            };

            _mediaCapture = new MediaCapture();
            await _mediaCapture.InitializeAsync(initialisationSettings);

            _captureElement.Source = _mediaCapture;
            await _mediaCapture.StartPreviewAsync();

            var videoProperties = (_mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties);
            var videoSize       = new Rect(0, 0, videoProperties.Width, videoProperties.Height);
            var detector        = await FaceDetector.CreateAsync();

            var bitmap = FaceDetector.GetSupportedBitmapPixelFormats().First();

            try
            {
                await Task.Run(async () =>
                {
                    VideoFrame frame        = new VideoFrame(bitmap, (int)videoSize.Width, (int)videoSize.Height);
                    TimeSpan? lastFrameTime = null;
                    while (true)
                    {
                        if (!_requestStopCancellationToken.Token.IsCancellationRequested)
                        {
                            await _mediaCapture.GetPreviewFrameAsync(frame);

                            if ((!lastFrameTime.HasValue) || (lastFrameTime != frame.RelativeTime))
                            {
                                var detectedFaces = await detector.DetectFacesAsync(frame.SoftwareBitmap);
                                if (detectedFaces.Count == 1)
                                {
                                    var convertedRgba16Bitmap         = SoftwareBitmap.Convert(frame.SoftwareBitmap, BitmapPixelFormat.Rgba16);
                                    InMemoryRandomAccessStream stream = new InMemoryRandomAccessStream();
                                    BitmapEncoder encoder             = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream);
                                    encoder.SetSoftwareBitmap(convertedRgba16Bitmap);
                                    await encoder.FlushAsync();

                                    var detectedPerson = await _faceService.DetectFace(stream.AsStream());

                                    if (detectedPerson != null && detectedPerson.PersonId.HasValue)
                                    {
                                        _userService.PersonId = detectedPerson.PersonId.Value;
                                        var user = await _userService.GetModelAsync();
                                        if (user == null)
                                        {
                                            user          = new UserProfileModel().RandomData(detectedPerson.Gender);
                                            user.PersonId = detectedPerson.PersonId.Value;
                                            user.FaceIds.Add(detectedPerson.FaceId.Value);
                                            user = await _userService.AddUserAsync(user);
                                        }
                                        await UserViewModel.SetValuesAsync(User, user);
                                    }
                                    else
                                    {
                                        // bug: when a person was not detected, the stream gets disposed
                                        //stream.Seek(0);
                                        stream  = new InMemoryRandomAccessStream();
                                        encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream);
                                        encoder.SetSoftwareBitmap(convertedRgba16Bitmap);
                                        await encoder.FlushAsync();

                                        // TODO: ask new user for initial profile data
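                                        // NOTE: detectedPerson can be null in this branch;
                                        // reading Gender below assumes a face was found but not yet enrolled.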
                                        var user      = new UserProfileModel().RandomData(detectedPerson.Gender);
                                        user.PersonId = await _faceService.CreatePersonAsync(user.FullName);
                                        var faceIds   = new List<Guid>();
                                        faceIds.Add(await _faceService.AddFaceAsync(user.PersonId, stream.AsStream()));
                                        user.FaceIds.AddRange(faceIds);
                                        user = await _userService.AddUserAsync(user);
                                        await UserViewModel.SetValuesAsync(User, user);
                                    }

                                    await Task.Delay(CHECK_INTERVAL * 1000, _requestStopCancellationToken.Token);
                                }
                            }
                            lastFrameTime = frame.RelativeTime;
                        }
                    }
                }, _requestStopCancellationToken.Token);
            }
            catch (Microsoft.ProjectOxford.Face.FaceAPIException fex)
            {
                Debug.WriteLine(fex.ErrorMessage);
            }
            catch (Exception ex)
            {
                Debug.WriteLine(ex.Message);
            }
            if (_requestStopCancellationToken.IsCancellationRequested)
            {
                await _mediaCapture.StopPreviewAsync();

                _captureElement.Source = null;
                _requestStopCancellationToken.Dispose();
            }
        }
Example #8
        private async Task<Tuple<Image<byte>, IList<PointF>>> PrepBitmapAsync(SoftwareBitmap bitmap)
        {
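            // Pad the bitmap to an even pixel height, find the first face,
            // then run the face alignmentor to extract landmark points.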
            if (bitmap.PixelHeight % 2 != 0)
            {
                var resized = new SoftwareBitmap(bitmap.BitmapPixelFormat, bitmap.PixelWidth, bitmap.PixelHeight + 1);
                bitmap.CopyTo(resized);
                bitmap = resized;
            }

            Rectangle firstFace;

            try
            {
                var detector = await FaceDetector.CreateAsync();

                var formats         = FaceDetector.GetSupportedBitmapPixelFormats();
                var convertedBitmap = SoftwareBitmap.Convert(bitmap, formats.First());
                var detected        = await detector.DetectFacesAsync(convertedBitmap);

                var faces = detected
                            .Select(x => x.FaceBox)
                            .Select(x => new Rectangle((int)x.X, (int)x.X + (int)x.Width, (int)x.Y, (int)x.Y + (int)x.Height));
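                // note: the Rectangle above is filled with (X, X+Width, Y, Y+Height),
                // i.e. (left, right, top, bottom) rather than (x, y, width, height);
                // verify this matches the Rectangle type the alignmentor expects.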
                if (!faces.Any())
                {
                    return null;
                }
                firstFace = faces.First();
            }
            catch (Exception)
            {
                Debugger.Break();
                throw;
            }

            IList<PointF> points;
            var           image = ConvertTo.Image.FromSoftwareBitmap(bitmap);

            try
            {
                if (alignmentor == null)
                {
                    using (var stream = ResourceManager.GetStream(ResourceKey.AsmAlignment))
                    {
                        alignmentor = FaceAlignmentorFactory.Create(FaceAlignmentType.Asm87Points, stream);
                    }
                }
                var grayImage = new ImageGray(image);
                points = alignmentor.Align(grayImage, firstFace).ToList();
                if (!points.Any())
                {
                    return null;
                }
            }
            catch (Exception)
            {
                Debugger.Break();
                throw;
            }

            return new Tuple<Image<byte>, IList<PointF>>(image, points);
        }