Example #1
0
        /// <summary>
        /// Page constructor. Wires the application-suspend handler, preloads the
        /// traffic-light lamp images and configures a 1 Hz UI timer. Every 5th tick
        /// the timer captures a preview frame, sends it to the Azure Face API, and
        /// pushes a "brightness" command (red/orange/green) to a device through
        /// IoT Hub based on the detected smile score.
        /// NOTE(review): the IoT Hub connection string, the Face API key and the
        /// target device id are all empty strings — fill these in before running.
        /// NOTE(review): timer.Start() is not called in this constructor —
        /// presumably started elsewhere; verify.
        /// </summary>
        public MainPage()
        {
            this.InitializeComponent();
            // Client used to send cloud-to-device messages through IoT Hub.
            var serviceClient = ServiceClient.CreateFromConnectionString("");

            Application.Current.Suspending += Application_Suspending;

            // Lamp images swapped in depending on the smile score thresholds below.
            var green  = new BitmapImage(new Uri("ms-appx:///Assets/Green.png"));
            var orange = new BitmapImage(new Uri("ms-appx:///Assets/Orange.png"));
            var red    = new BitmapImage(new Uri("ms-appx:///Assets/Red.png"));

            // Seconds until the next Face API call, and the status text shown in the UI.
            var countdown = 5;
            var status    = string.Empty;

            timer.Tick += async(s, e) =>
            {
                CountDown.Text = $"{countdown:0}";
                Status.Text    = status;
                if (countdown <= 0)
                {
                    countdown = 5;

                    // Grab one preview frame sized to the preview control, JPEG-encode it
                    // in memory, and hand the stream to the Face API for detection.
                    using (var faceFrame = await mediaCapture.GetPreviewFrameAsync(new VideoFrame(BitmapPixelFormat.Bgra8, (int)PreviewControl.ActualWidth, (int)PreviewControl.ActualHeight)))
                        using (var faceStream = new InMemoryRandomAccessStream())
                            using (var faceClient = new FaceServiceClient("", "https://westeurope.api.cognitive.microsoft.com/face/v1.0"))
                            {
                                var faceEncoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, faceStream);

                                faceEncoder.SetSoftwareBitmap(faceFrame.SoftwareBitmap);
                                await faceEncoder.FlushAsync();

                                // Request landmarks plus every attribute rendered in the status panel below.
                                var faces = await faceClient.DetectAsync(faceStream.AsStreamForRead(),
                                                                         returnFaceLandmarks : true,
                                                                         returnFaceAttributes : new[] {
                                    FaceAttributeType.Emotion,
                                    FaceAttributeType.Glasses,
                                    FaceAttributeType.Smile,
                                    FaceAttributeType.FacialHair,
                                    FaceAttributeType.Gender,
                                    FaceAttributeType.Hair
                                });

                                // Default: lamp hidden and all channels off. This message is
                                // sent unchanged when no face (or no glasses) is detected.
                                Lamp.Visibility = Visibility.Collapsed;
                                var message = new
                                {
                                    Name       = "brightness",
                                    Parameters = new
                                    {
                                        green  = 0,
                                        orange = 0,
                                        red    = 0
                                    }
                                };

                                if (faces.Length > 0)
                                {
                                    // Only the first detected face is considered.
                                    var glasses    = faces[0].FaceAttributes.Glasses;
                                    var smile      = faces[0].FaceAttributes.Smile;
                                    var emotions   = faces[0].FaceAttributes.Emotion;
                                    var facialHair = faces[0].FaceAttributes.FacialHair;
                                    var gender     = faces[0].FaceAttributes.Gender;
                                    var hair       = faces[0].FaceAttributes.Hair;

                                    // The lamp only reacts when the person wears glasses;
                                    // smile score then selects red (<=0.1), orange (<=0.5) or green.
                                    if (glasses != Glasses.NoGlasses)
                                    {
                                        if (smile <= 0.1d)
                                        {
                                            Lamp.Source = red;
                                            message     = new
                                            {
                                                Name       = "brightness",
                                                Parameters = new
                                                {
                                                    green  = 0,
                                                    orange = 0,
                                                    red    = 255
                                                }
                                            };
                                        }
                                        else if (smile <= 0.5d)
                                        {
                                            Lamp.Source = orange;
                                            message     = new
                                            {
                                                Name       = "brightness",
                                                Parameters = new
                                                {
                                                    green  = 0,
                                                    orange = 255,
                                                    red    = 0
                                                }
                                            };
                                        }
                                        else
                                        {
                                            Lamp.Source = green;
                                            message     = new
                                            {
                                                Name       = "brightness",
                                                Parameters = new
                                                {
                                                    green  = 255,
                                                    orange = 0,
                                                    red    = 0
                                                }
                                            };
                                        }
                                        Lamp.Visibility = Visibility.Visible;
                                    }

                                    // Build the multi-line status text shown next tick.
                                    status  = $"Glasses:  {glasses}{Environment.NewLine}";
                                    status += $"Smile:    {smile:0.000}{Environment.NewLine}{Environment.NewLine}";
                                    status += $"Gender:   {gender}{Environment.NewLine}{Environment.NewLine}";
                                    status += $"Anger:    {emotions.Anger:0.000}{Environment.NewLine}";
                                    status += $"Contempt: {emotions.Contempt:0.000}{Environment.NewLine}";
                                    status += $"Disgust:  {emotions.Disgust:0.000}{Environment.NewLine}";
                                    status += $"Fear:     {emotions.Fear:0.000}{Environment.NewLine}";
                                    status += $"Happiness:{emotions.Happiness:0.000}{Environment.NewLine}";
                                    status += $"Neutral:  {emotions.Neutral:0.000}{Environment.NewLine}";
                                    status += $"Sadness:  {emotions.Sadness:0.000}{Environment.NewLine}";
                                    status += $"Surprise: {emotions.Surprise:0.000}{Environment.NewLine}{Environment.NewLine}";
                                    status += $"Beard:    {facialHair.Beard:0.000}{Environment.NewLine}";
                                    status += $"Moustache:{facialHair.Moustache:0.000}{Environment.NewLine}";
                                    status += $"Sideburns:{facialHair.Sideburns:0.000}{Environment.NewLine}";
                                    status += $"Bald:     {hair.Bald:0.000}";
                                }
                                else
                                {
                                    status = string.Empty;
                                }
                                // First argument is the target device id — presumably to be
                                // filled in; verify against the IoT Hub registration.
                                await serviceClient.SendAsync("", new Message(Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(message))));
                            }
                }
                countdown--;
            };
            timer.Interval = TimeSpan.FromSeconds(1);
        }
        /// <summary>
        /// ThreadPoolTimer callback: captures one preview frame, runs the on-device
        /// FaceTracker on it and, when at least one face is found, JPEG-encodes the
        /// frame and sends it to the Face API to identify the person and mark them
        /// as attending. Overlapping executions are prevented with a non-blocking
        /// semaphore try-acquire. (async void is acceptable here because this is a
        /// timer-callback entry point.)
        /// </summary>
        /// <param name="timer">The timer that fired this callback (unused).</param>
        private async void ProcessCurrentVideoFrame(ThreadPoolTimer timer)
        {
            // If state is not Streaming, return.
            if (_state != StreamingState.Streaming)
            {
                return;
            }

            // If a previous tick is still processing, skip this one (non-blocking wait).
            if (!_semaphoreSlim.Wait(0))
            {
                return;
            }

            // FaceTracker requires Nv12 input frames.
            const BitmapPixelFormat PixelFormat = BitmapPixelFormat.Nv12;

            try
            {
                using (VideoFrame currentFrame = new VideoFrame(PixelFormat, (int)_videoProperties.Width, (int)_videoProperties.Height))
                {
                    // Copy the current preview frame from _mediaCapture into currentFrame.
                    await _mediaCapture.GetPreviewFrameAsync(currentFrame);

                    // Detect faces with the built-in (on-device) face tracker.
                    IList <DetectedFace> builtinFaces = await _faceTracker.ProcessNextFrameAsync(currentFrame);

                    // Bgra8 copy for the JPEG encoder (the tracker needs Nv12, encoders need Bgra8).
                    // FIX: dispose the converted bitmap — it was previously leaked on every tick.
                    using (SoftwareBitmap tempBitmap = SoftwareBitmap.Convert(currentFrame.SoftwareBitmap, BitmapPixelFormat.Bgra8))
                    {
                        if (builtinFaces.Count != 0)
                        {
                            var frameSize = new Size(currentFrame.SoftwareBitmap.PixelWidth, currentFrame.SoftwareBitmap.PixelHeight);
                            await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                            {
                                ShowFromBuiltIn(frameSize, builtinFaces);
                            });

                            // Encode the frame as JPEG into an in-memory stream.
                            // FIX: dispose the stream — it was previously leaked on every tick.
                            using (IRandomAccessStream stream = new InMemoryRandomAccessStream())
                            {
                                BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream);

                                encoder.SetSoftwareBitmap(tempBitmap);
                                await encoder.FlushAsync();

                                // Ask the Face API to identify the single person in the picture.
                                CustomFaceModel customFaces = await _faceApiHelper.GetIdentifySingleResultAsync(stream.AsStream());

                                if (customFaces != null)
                                {
                                    // Mark the identified person as present, then show the result on the UI thread.
                                    await _dataHelper.ChangeAttendStatusAsync(customFaces.Name, true);

                                    await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                                                              ShowLoginSuccess(customFaces));
                                }
                            }
                        }
                        else
                        {
                            // No face in view: clear any previously painted overlays.
                            await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                                                      PaintingCanvas.Children.Clear());
                        }
                    }
                }
            }
            catch (Microsoft.ProjectOxford.Face.FaceAPIException faceEx)
            {
                // Face API rejected the request — surface its error message/code.
                await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                                          ShowAlertHelper.ShowDialog(faceEx.ErrorMessage, faceEx.ErrorCode));
            }
            catch (Exception ex)
            {
                await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                                          ShowAlertHelper.ShowDialog(ex.Message));
            }
            finally
            {
                // Always release so the next timer tick can run.
                _semaphoreSlim.Release();
            }
        }
        /// <summary>
        /// Light-tracking timer tick: captures a preview frame, downsamples it 8x,
        /// counts near-white pixels (average channel brightness >= 250) in the
        /// left/right and top/bottom halves, then nudges the X/Y servos by 0.2 pulse
        /// units toward the brighter side whenever the imbalance exceeds a 20-pixel
        /// dead band. Result labels show the winning direction and the raw counts.
        /// (async void is acceptable here — this is a timer event handler.)
        /// </summary>
        private async void Timer_Tick(object sender, object e)
        {
            // Size the capture frame to the current preview resolution.
            var previewProperties = mc.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
            var videoFrame        = new VideoFrame(BitmapPixelFormat.Bgra8, (int)previewProperties.Width, (int)previewProperties.Height);

            const int beyaz    = 250; // brightness threshold for a "white" pixel
            const int hataPayi = 20;  // dead band (margin of error) in pixel counts

            var result_sol = "";
            var result_sag = "";
            var result_üst = "";
            var result_alt = "";

            var sol = 0; // bright pixels in the left half
            var sag = 0; // right half
            var ust = 0; // top half
            var alt = 0; // bottom half

            // FIX: dispose the captured frame — it was previously leaked on every tick.
            using (var frame = await mc.GetPreviewFrameAsync(videoFrame))
            {
                var bitmap = new WriteableBitmap(frame.SoftwareBitmap.PixelWidth, frame.SoftwareBitmap.PixelHeight);

                frame.SoftwareBitmap.CopyToBuffer(bitmap.PixelBuffer);

                // Downsample 8x so the per-pixel scan below stays cheap.
                var resized = bitmap.Resize(frame.SoftwareBitmap.PixelWidth / 8, frame.SoftwareBitmap.PixelHeight / 8, WriteableBitmapExtensions.Interpolation.Bilinear);

                var yatayOrt = resized.PixelWidth / 2;  // horizontal midpoint
                var dikeyOrt = resized.PixelHeight / 2; // vertical midpoint

                for (int x = 0; x < resized.PixelWidth; x += 1)
                {
                    for (int y = 0; y < resized.PixelHeight; y += 1)
                    {
                        var color = resized.GetPixel(x, y);

                        // Simple average of the three channels as a brightness measure.
                        byte c = (byte)((color.R + color.B + color.G) / 3);

                        if (c >= beyaz)
                        {
                            if (x < yatayOrt)
                            {
                                sol++;
                            }
                            else
                            {
                                sag++;
                            }

                            if (y < dikeyOrt)
                            {
                                ust++;
                            }
                            else
                            {
                                alt++;
                            }
                        }
                    }
                }
            }

            // Pan/tilt toward the brighter half. The clamp keeps the pulse in [0.01, 4].
            // FIX: removed the redundant inner `sol != sag` (etc.) checks — they are
            // always true when the dead-band condition holds — and the empty else blocks.
            if (sol > sag + hataPayi)
            {
                result_sol = "sol";
                pulse_x    = Math.Min(pulse_x + 0.2, 4);
                _servomotor_x.SetPulse(pulse_x);
            }
            if (sag > sol + hataPayi)
            {
                result_sag = "sağ";
                pulse_x    = Math.Max(pulse_x - 0.2, 0.01);
                _servomotor_x.SetPulse(pulse_x);
            }
            if (ust > alt + hataPayi)
            {
                result_üst = "üst";
                pulse_y    = Math.Min(pulse_y + 0.2, 4);
                _servomotor_y.SetPulse(pulse_y);
            }
            if (alt > ust + hataPayi)
            {
                result_alt = "alt";
                pulse_y    = Math.Max(pulse_y - 0.2, 0.01);
                _servomotor_y.SetPulse(pulse_y);
            }

            lblResult_sag.Text = result_sag + sag;
            lblResult_sol.Text = result_sol + sol;
            lblResult_üst.Text = result_üst + ust;
            lblResult_alt.Text = result_alt + alt;
        }
        /// <summary>
        /// Initializes the camera, starts the video preview, chooses a suitable
        /// preview resolution, and launches a self-rescheduling timer loop that
        /// decodes preview frames with ZXing, invoking <paramref name="scanCallback"/>
        /// for every recognized barcode.
        /// NOTE(review): displayRequest.RequestActive() is never released on the
        /// early-failure return paths below — verify against the matching stop logic.
        /// </summary>
        /// <param name="scanCallback">Callback invoked with each scan result.</param>
        /// <param name="options">Scanning options; MobileBarcodeScanningOptions.Default when null.</param>
        public async Task StartScanningAsync(Action <MobileResult> scanCallback, MobileBarcodeScanningOptions options = null)
        {
            if (stopping)
            {
                return;
            }

            // Keep the display awake while scanning.
            displayRequest.RequestActive();

            isAnalyzing     = true;
            ScanCallback    = scanCallback;
            ScanningOptions = options ?? MobileBarcodeScanningOptions.Default;

            topText.Text    = TopText ?? string.Empty;
            bottomText.Text = BottomText ?? string.Empty;

            // Show either the caller-supplied overlay or the built-in default one.
            if (UseCustomOverlay)
            {
                gridCustomOverlay.Children.Clear();
                if (CustomOverlay != null)
                {
                    gridCustomOverlay.Children.Add(CustomOverlay);
                }

                gridCustomOverlay.Visibility  = Visibility.Visible;
                gridDefaultOverlay.Visibility = Visibility.Collapsed;
            }
            else
            {
                gridCustomOverlay.Visibility  = Visibility.Collapsed;
                gridDefaultOverlay.Visibility = Visibility.Visible;
            }

            // Find which device to use
            var preferredCamera = await GetFilteredCameraOrDefaultAsync(ScanningOptions);

            if (preferredCamera == null)
            {
                System.Diagnostics.Debug.WriteLine("No camera available");
                isMediaCaptureInitialized = false;
                return;
            }

            if (preferredCamera.EnclosureLocation == null || preferredCamera.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Unknown)
            {
                // No information on the location of the camera, assume it's an external camera, not integrated on the device.
                externalCamera = true;
            }
            else
            {
                // Camera is fixed on the device.
                externalCamera = false;

                // Only mirror the preview if the camera is on the front panel.
                mirroringPreview = preferredCamera.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Front;
            }

            mediaCapture = new MediaCapture();

            // Initialize the capture with the settings above
            try
            {
                await mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings
                {
                    StreamingCaptureMode = StreamingCaptureMode.Video,
                    VideoDeviceId        = preferredCamera.Id
                });

                isMediaCaptureInitialized = true;
            }
            catch (UnauthorizedAccessException)
            {
                System.Diagnostics.Debug.WriteLine("Denied access to the camera");
            }
            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine("Exception when init MediaCapture: {0}", ex);
            }

            if (!isMediaCaptureInitialized)
            {
                return;
            }

            // Set the capture element's source to show it in the UI
            captureElement.Source        = mediaCapture;
            captureElement.FlowDirection = mirroringPreview ? FlowDirection.RightToLeft : FlowDirection.LeftToRight;

            // Start the preview
            await mediaCapture.StartPreviewAsync();

            // Get all the available resolutions for preview
            var availableProperties  = mediaCapture.VideoDeviceController.GetAvailableMediaStreamProperties(MediaStreamType.VideoPreview);
            var availableResolutions = new List <CameraResolution>();

            foreach (var ap in availableProperties)
            {
                var vp = (VideoEncodingProperties)ap;
                System.Diagnostics.Debug.WriteLine("Camera Preview Resolution: {0}x{1}", vp.Width, vp.Height);
                availableResolutions.Add(new CameraResolution {
                    Width = (int)vp.Width, Height = (int)vp.Height
                });
            }
            CameraResolution previewResolution = null;

            // Let the caller pick a resolution first, if a selector was provided.
            if (ScanningOptions.CameraResolutionSelector != null)
            {
                previewResolution = ScanningOptions.CameraResolutionSelector(availableResolutions);
            }

            // If the user did not specify a resolution, let's try and find a suitable one
            if (previewResolution == null)
            {
                // Loop through all supported sizes
                foreach (var sps in availableResolutions)
                {
                    // Find one that's >= 640x360 but <= 1000x1000
                    // This will likely pick the *smallest* size in that range, which should be fine
                    if (sps.Width >= 640 && sps.Width <= 1000 && sps.Height >= 360 && sps.Height <= 1000)
                    {
                        previewResolution = new CameraResolution
                        {
                            Width  = sps.Width,
                            Height = sps.Height
                        };
                        break;
                    }
                }
            }

            // Last resort: fall back to the last advertised resolution.
            if (previewResolution == null)
            {
                previewResolution = availableResolutions.LastOrDefault();
            }

            if (previewResolution == null)
            {
                System.Diagnostics.Debug.WriteLine("No preview resolution available. Camera may be in use by another application.");
                return;
            }

            MobileBarcodeScanner.Log("Using Preview Resolution: {0}x{1}", previewResolution.Width, previewResolution.Height);

            // Find the matching property based on the selection, again
            var chosenProp = availableProperties.FirstOrDefault(ap => ((VideoEncodingProperties)ap).Width == previewResolution.Width && ((VideoEncodingProperties)ap).Height == previewResolution.Height);

            // Pass in the requested preview size properties
            // so we can set them at the same time as the preview rotation
            // to save an additional set property call
            await SetPreviewRotationAsync(chosenProp);

            // *after* the preview is setup, set this so that the UI layout happens
            // otherwise the preview gets stuck in a funny place on screen
            captureElement.Stretch = Stretch.UniformToFill;

            await SetupAutoFocus();

            // Barcode reader configured from the scanning options.
            var zxing = ScanningOptions.BuildBarcodeReader();

            // Decode loop: each pass grabs a frame, tries to decode it, then
            // reschedules itself (one-shot timer re-armed at the end of each pass).
            timerPreview = new Timer(async(state) => {
                var delay = ScanningOptions.DelayBetweenAnalyzingFrames;

                // Not ready to decode (stopping, busy, paused, or preview not
                // streaming) — just re-arm the timer and bail out.
                if (stopping || processing || !isAnalyzing ||
                    (mediaCapture == null || mediaCapture.CameraStreamState != Windows.Media.Devices.CameraStreamState.Streaming))
                {
                    timerPreview.Change(delay, Timeout.Infinite);
                    return;
                }

                processing = true;

                SoftwareBitmapLuminanceSource luminanceSource = null;

                try
                {
                    // Get preview
                    var frame = await mediaCapture.GetPreviewFrameAsync(videoFrame);

                    // Create our luminance source
                    luminanceSource = new SoftwareBitmapLuminanceSource(frame.SoftwareBitmap);
                } catch (Exception ex)
                {
                    MobileBarcodeScanner.Log("GetPreviewFrame Failed: {0}", ex);
                }

                ZXing.Result result = null;

                try
                {
                    // Try decoding the image
                    if (luminanceSource != null)
                    {
                        result = zxing.Decode(luminanceSource);
                    }
                }
                catch (Exception ex)
                {
                    MobileBarcodeScanner.Log("Warning: zxing.Decode Failed: {0}", ex);
                }

                // Check if a result was found
                if (result != null && !string.IsNullOrEmpty(result.Text))
                {
                    if (!ContinuousScanning)
                    {
                        // Single-shot mode: never re-arm, stop scanning on the UI thread.
                        delay = Timeout.Infinite;
                        await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async() => { await StopScanningAsync(); });
                    }
                    else
                    {
                        delay = ScanningOptions.DelayBetweenContinuousScans;
                    }

                    LastScanResult = new MobileResult(result);
                    ScanCallback(LastScanResult);
                }

                processing = false;

                timerPreview.Change(delay, Timeout.Infinite);
            }, null, ScanningOptions.InitialDelayBeforeAnalyzingFrames, Timeout.Infinite);
        }
        /// <summary>
        /// Event handler for the 'Extract' button. Captures a preview frame from the
        /// camera, runs OCR on it, overlays the recognized words on the image with
        /// data-bound boxes, and switches the UI from live preview to the still image.
        /// </summary>
        /// <param name="sender">The tapped button (unused).</param>
        /// <param name="e">Tap event arguments (unused).</param>
        private async void ExtractButton_Tapped(object sender, Windows.UI.Xaml.Input.TappedRoutedEventArgs e)
        {
            //Get information about the preview.
            var previewProperties = mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
            int videoFrameWidth   = (int)previewProperties.Width;
            int videoFrameHeight  = (int)previewProperties.Height;

            // In portrait modes, the width and height must be swapped for the VideoFrame to have the correct aspect ratio and avoid letterboxing / black bars.
            if (!externalCamera && (displayInformation.CurrentOrientation == DisplayOrientations.Portrait || displayInformation.CurrentOrientation == DisplayOrientations.PortraitFlipped))
            {
                videoFrameWidth  = (int)previewProperties.Height;
                videoFrameHeight = (int)previewProperties.Width;
            }

            // Create the video frame to request a SoftwareBitmap preview frame.
            var videoFrame = new VideoFrame(BitmapPixelFormat.Bgra8, videoFrameWidth, videoFrameHeight);

            // Capture the preview frame.
            using (var currentFrame = await mediaCapture.GetPreviewFrameAsync(videoFrame))
            {
                // Collect the resulting frame.
                SoftwareBitmap bitmap = currentFrame.SoftwareBitmap;

                // OCR engine for the configured language; null when unsupported.
                OcrEngine ocrEngine = OcrEngine.TryCreateFromLanguage(ocrLanguage);

                if (ocrEngine == null)
                {
                    rootPage.NotifyUser(ocrLanguage.DisplayName + " is not supported.", NotifyType.ErrorMessage);

                    return;
                }

                // Show the captured frame as a still image behind the text overlay.
                var imgSource = new WriteableBitmap(bitmap.PixelWidth, bitmap.PixelHeight);
                bitmap.CopyToBuffer(imgSource.PixelBuffer);
                PreviewImage.Source = imgSource;

                var ocrResult = await ocrEngine.RecognizeAsync(bitmap);

                // Used for text overlay.
                // Prepare scale transform for words since image is not displayed in original format.
                // NOTE(review): this transform (note the 'Trasform' typo) appears unused
                // within this handler — scaling is presumably handled by
                // UpdateWordBoxTransform(); verify and remove if dead.
                var scaleTrasform = new ScaleTransform
                {
                    CenterX = 0,
                    CenterY = 0,
                    ScaleX  = PreviewControl.ActualWidth / bitmap.PixelWidth,
                    ScaleY  = PreviewControl.ActualHeight / bitmap.PixelHeight
                };

                if (ocrResult.TextAngle != null)
                {
                    // If text is detected under some angle in this sample scenario we want to
                    // overlay word boxes over original image, so we rotate overlay boxes.
                    TextOverlay.RenderTransform = new RotateTransform
                    {
                        Angle   = (double)ocrResult.TextAngle,
                        CenterX = PreviewImage.ActualWidth / 2,
                        CenterY = PreviewImage.ActualHeight / 2
                    };
                }

                // Iterate over recognized lines of text.
                foreach (var line in ocrResult.Lines)
                {
                    // Iterate over words in line.
                    foreach (var word in line.Words)
                    {
                        // Define the TextBlock.
                        var wordTextBlock = new TextBlock()
                        {
                            Text  = word.Text,
                            Style = (Style)this.Resources["ExtractedWordTextStyle"]
                        };

                        WordOverlay wordBoxOverlay = new WordOverlay(word);

                        // Keep references to word boxes.
                        wordBoxes.Add(wordBoxOverlay);

                        // Define position, background, etc.
                        var overlay = new Border()
                        {
                            Child = wordTextBlock,
                            Style = (Style)this.Resources["HighlightedWordBoxHorizontalLine"]
                        };

                        // Bind word boxes to UI.
                        overlay.SetBinding(Border.MarginProperty, wordBoxOverlay.CreateWordPositionBinding());
                        overlay.SetBinding(Border.WidthProperty, wordBoxOverlay.CreateWordWidthBinding());
                        overlay.SetBinding(Border.HeightProperty, wordBoxOverlay.CreateWordHeightBinding());

                        // Put the filled textblock in the results grid.
                        TextOverlay.Children.Add(overlay);
                    }
                }

                rootPage.NotifyUser("Image processed using " + ocrEngine.RecognizerLanguage.DisplayName + " language.", NotifyType.StatusMessage);
            }

            UpdateWordBoxTransform();

            // Swap the UI from live preview to the processed still image.
            PreviewControl.Visibility = Visibility.Collapsed;
            Image.Visibility          = Visibility.Visible;
            ExtractButton.Visibility  = Visibility.Collapsed;
            CameraButton.Visibility   = Visibility.Visible;
        }
        /// <summary>
        /// Captures one preview frame, runs <paramref name="detector"/> on its raw
        /// BGRA pixels, then draws the detected face rectangles/landmarks and an fps
        /// figure onto the canvas on the UI thread.
        /// </summary>
        /// <param name="detector">Face detector to run; must not be null.</param>
        /// <param name="capture">Initialized media capture that supplies the preview frame; must not be null.</param>
        /// <param name="token">Cancellation token checked before and after detection.</param>
        /// <exception cref="ArgumentNullException">When detector or capture is null.</exception>
        /// <exception cref="OperationCanceledException">When <paramref name="token"/> is cancelled.</exception>
        private async Task FaceDetectAsync(FaceDetector detector, MediaCapture capture, CancellationToken token)
        {
            // FIX: the original also tested `token == null`, which is always false for
            // a value type (compiler warning CS0472) — removed. The remaining checks
            // now report which argument was null.
            if (detector == null)
            {
                throw new ArgumentNullException(nameof(detector));
            }
            if (capture == null)
            {
                throw new ArgumentNullException(nameof(capture));
            }

            var previewProperties = capture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;

            int width  = (int)previewProperties.Width;
            int height = (int)previewProperties.Height;

            FaceDetectResult result = null;

            var stopWatch = Stopwatch.StartNew();

            // FIX: videoFrame is now disposed on every path — the early return below
            // previously leaked it (Dispose was only reached on the success path).
            using (var videoFrame = new VideoFrame(BitmapPixelFormat.Bgra8, width, height))
                using (var currentFrame = await capture.GetPreviewFrameAsync(videoFrame))
                    using (var softwareBitmap = currentFrame.SoftwareBitmap)
                    {
                        if (softwareBitmap == null)
                        {
                            return;
                        }

                        // SoftwareBitmap -> raw BGRA byte array (4 bytes per pixel).
                        var buffer = new byte[4 * width * height];
                        softwareBitmap.CopyToBuffer(buffer.AsBuffer());

                        token.ThrowIfCancellationRequested();

                        // Detect face
                        result = detector.Detect(buffer, width, height);

                        token.ThrowIfCancellationRequested();
                    }
            stopWatch.Stop();

            // Draw result to Canvas
            await Dispatcher.RunAsync(CoreDispatcherPriority.High, () =>
            {
                FaceDrawCanvas.Width  = width;
                FaceDrawCanvas.Height = height;

                // Draw fps.
                // FIX: guard against DivideByZeroException when the frame took < 1 ms.
                FpsTextBlock.Text = (1000 / Math.Max(stopWatch.ElapsedMilliseconds, 1)) + "fps";

                // Draw face rectangles and landmark points.
                if (_faceDrawer != null && result != null)
                {
                    List <FaceDetectData> faces = new List <FaceDetectData>();
                    foreach (var f in result.Faces)
                    {
                        FaceDetectData data = new FaceDetectData();
                        data.FaceRect       = f.FaceRect;

                        foreach (var p in f.FacePoints)
                        {
                            data.FaceLandmarks.Add(p);
                        }

                        faces.Add(data);
                    }

                    _faceDrawer.DrawToCanvas(FaceDrawCanvas, faces);
                }
            });
        }
        /// <summary>
        /// Initializes the camera, starts the video preview, and runs a polling loop
        /// that grabs preview frames and scans them for QR codes until
        /// <c>qrAnalyzerCancellationTokenSource</c> is cancelled.
        /// </summary>
        /// <param name="comboboxItem">Camera selected by the user; when null, the first
        /// available color camera is used instead.</param>
        public async Task StartPreviewAsync(QR_Code_Scanner.Business.ComboboxItem comboboxItem)
        {
            FrameSourceInformation frameSourceInformation = new FrameSourceInformation();

            try
            {
                mediaCapture = new MediaCapture();

                var settings = new MediaCaptureInitializationSettings()
                {
                    StreamingCaptureMode = StreamingCaptureMode.Video
                };
                if (comboboxItem != null)
                {
                    settings.VideoDeviceId = comboboxItem.ID;
                    frameSourceInformation = comboboxItem.MediaFrameSourceInformation;
                }
                else
                {
                    // No explicit camera chosen: discover the color cameras once and
                    // fall back to the first one found.
                    if (availableColorCameras == null)
                    {
                        var frameSourceInformations = await GetFrameSourceInformationAsync();

                        frameSourceInformation = frameSourceInformations.First();
                        availableColorCameras  = await GetFrameSourceGroupsAsync(frameSourceInformation);
                    }
                    settings.VideoDeviceId = availableColorCameras.First().Id;
                }

                qrAnalyzerCancellationTokenSource = new CancellationTokenSource();
                try
                {
                    await mediaCapture.InitializeAsync(settings);
                }
                catch (Exception)
                {
                    // NOTE(review): fire-and-forget in the original code; confirm
                    // ShowMessageToUserAsync returns an awaitable before awaiting it.
                    MessageManager.ShowMessageToUserAsync("Tried to initialize a color camera but failed to do so.");
                }
                List <VideoEncodingProperties> availableResolutions = null;
                try
                {
                    availableResolutions = mediaCapture.VideoDeviceController.GetAvailableMediaStreamProperties(MediaStreamType.VideoPreview).Where(properties => properties is VideoEncodingProperties).Select(properties => (VideoEncodingProperties)properties).ToList();
                }
                catch (Exception)
                {
                    MessageManager.ShowMessageToUserAsync("No resolutions could be detected, trying default mode.");
                }

                VideoEncodingProperties bestVideoResolution = this.findBestResolution(availableResolutions);

                if (bestVideoResolution != null)
                {
                    await mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, bestVideoResolution);
                }

                displayRequest.RequestActive();
            }
            catch (UnauthorizedAccessException)
            {
                // This will be thrown if the user denied access to the camera in privacy settings
                MessageManager.ShowMessageToUserAsync("The app was denied access to the camera");
                return;
            }

            try
            {
                this.ScanForQRcodes         = true;
                previewWindowElement.Source = mediaCapture;
                await mediaCapture.StartPreviewAsync();

                isPreviewing = true;
                var imgProp = new ImageEncodingProperties
                {
                    Subtype = "BMP",
                    Width   = (uint)imgCaptureWidth,
                    Height  = (uint)imgCaptureHeight
                };
                var bcReader          = new BarcodeReader();
                var qrCaptureInterval = 200;

                var torch = mediaCapture.VideoDeviceController.TorchControl;
                var exposureCompensationControl = mediaCapture.VideoDeviceController.ExposureCompensationControl;

                if (torch.Supported)
                {
                    torch.Enabled = false;
                }

                // Get information about the preview so capture frames match its size.
                var previewProperties = mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;

                // BUGFIX: check the token source for null BEFORE dereferencing it, and
                // drop the "Token != null" test — CancellationToken is a struct, so that
                // comparison was always true.
                while (qrAnalyzerCancellationTokenSource != null && !qrAnalyzerCancellationTokenSource.IsCancellationRequested)
                {
                    // Try to capture and decode a QR code from the current preview frame.
                    if (ScanForQRcodes)
                    {
                        // "using" guarantees the frame is disposed even if decoding throws.
                        using (var videoFrameFormatPlaceholder = new VideoFrame(BitmapPixelFormat.Bgra8, (int)previewProperties.Width, (int)previewProperties.Height))
                        {
                            await mediaCapture.GetPreviewFrameAsync(videoFrameFormatPlaceholder);
                            await findQRinImageAsync(bcReader, videoFrameFormatPlaceholder);
                        }
                    }

                    // Delay between scans; the continuation swallows the cancellation
                    // exception so cancelling the token exits the loop quietly.
                    var   delayTask        = Task.Delay(qrCaptureInterval, qrAnalyzerCancellationTokenSource.Token);
                    var   continuationTask = delayTask.ContinueWith(task => { });
                    await continuationTask;
                }
            }
            catch (System.IO.FileLoadException)
            {
                // Another app holds exclusive control of the camera; wait for the
                // status-changed event to retry.
                mediaCapture.CaptureDeviceExclusiveControlStatusChanged += mediaCapture_CaptureDeviceExclusiveControlStatusChanged;
            }
            catch (System.ObjectDisposedException)
            {
                Debug.WriteLine("object was disposed");
            }
            catch (Exception)
            {
                Debug.WriteLine("another exception occurred.");
            }
        }
Example #8
0
        // Fujimaki Add
        private async Task PlaySoundAsync()
        {
            // Get information about the preview
            var previewProperties = _mediaCapture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;

            // Create the video frame to request a SoftwareBitmap preview frame
            var videoFrame = new VideoFrame(BitmapPixelFormat.Bgra8, (int)previewProperties.Width, (int)previewProperties.Height);

            try
            {
                // Capture the preview frame
                using (var currentFrame = await _mediaCapture.GetPreviewFrameAsync(videoFrame))
                {
                    try
                    {
                        // Collect the resulting frame
                        SoftwareBitmap previewFrame = currentFrame.SoftwareBitmap;
                        double[,] hues = null;  // 色合いのヒストグラムデータ
                        GetHues(previewFrame, out hues);
                        double[,] noiseRemovedHues = null;
                        RemoveNoise(hues, out noiseRemovedHues);
                        double[] filter = new double[_smoothFilterLength]; // ヒストグラムの平滑化に用いるフィルタ
                        for (int i = 0; i < filter.Length; i++)
                        {
                            filter[i] = 1.0;
                        }
                        SmoothHist(noiseRemovedHues, filter); // ヒストグラムを平滑化する
                        int[,] huePeaks = null;               // 色合いのピークデータ
                        GetHuePeaks(noiseRemovedHues, out huePeaks);
                        ResetPlayEnable();
                        for (int i = 0; i < huePeaks.GetLength(0); i++)
                        {
                            for (int j = 0; j < huePeaks.GetLength(1); j++)
                            {
                                if (huePeaks[i, j] > 0)
                                {
                                    string colorName = "";
                                    int    gakkiId   = -1;
                                    GetColorNameFromHueValue((double)i, out colorName, out gakkiId);
                                    if (gakkiId >= 0)
                                    {
                                        _playEnable[gakkiId, j] = true;
                                    }
                                }
                            }
                        }

                        string txt = "";
                        for (int i = 0; i < _playEnable.GetLength(0); i++)
                        {
                            for (int j = 0; j < _playEnable.GetLength(1); j++)
                            {
                                if (_playEnable[i, j] && _soundPlayer[i, j, _currentSoundIdx[i, j]].PlaybackSession.PlaybackState == MediaPlaybackState.Paused)
                                {
                                    await ThreadPool.RunAsync(
                                        new WorkItemHandler((IAsyncAction act) =>
                                    {
                                        _soundPlayer[i, j, _currentSoundIdx[i, j]].Play();
                                        _currentSoundIdx[i, j] = ++_currentSoundIdx[i, j] % _numSameFile;
                                    }),
                                        WorkItemPriority.Normal,
                                        WorkItemOptions.TimeSliced
                                        );

                                    txt += _gakki[i].gakkiName + ":" + j.ToString() + "|";
                                }
                            }
                        }
                        this.ValueText.Text = txt;
                    }
                    catch (Exception excep)
                    {
                        int a = 0;
                    }
                }
            }
            catch (Exception excep)
            {
                int a = 0;
            }
        }
Example #9
0
        // Timer/event callback: grabs one preview frame, runs local face tracking on
        // it, then (on the UI thread) draws the results and drives the emotion
        // capture/speech flow. "async void" is acceptable here because this is an
        // event handler signature.
        async void ProcessCurrentVideoFrame(object sender, object e)
        {
            // If a lock is being held it means we're still waiting for processing work on the previous frame to complete.
            // In this situation, don't wait on the semaphore but exit immediately.
            if (!_isStreaming || !_frameProcessingSemaphore.Wait(0))
            {
                return;
            }

            try
            {
                // FaceTracker requires NV12 input, so request the frame in that format.
                using (var previewFrame = new VideoFrame(BitmapPixelFormat.Nv12,
                                                         (int)_videoProperties.Width,
                                                         (int)_videoProperties.Height))
                {
                    await _mediaManager.GetPreviewFrameAsync(previewFrame);

                    IList <DetectedFace> faces = null;

                    // The returned VideoFrame should be in the supported NV12 format but we need to verify this.
                    if (FaceDetector.IsBitmapPixelFormatSupported(previewFrame.SoftwareBitmap.BitmapPixelFormat))
                    {
                        faces = await _faceTracker.ProcessNextFrameAsync(previewFrame);
                    }

                    //// Create our visualization using the frame dimensions and face results but run it on the UI thread.
                    var previewFrameSize = new Size(previewFrame.SoftwareBitmap.PixelWidth, previewFrame.SoftwareBitmap.PixelHeight);
                    // NOTE(review): the await below completes when the lambda is
                    // *scheduled result* of RunAsync finishes; because the lambda is
                    // itself async, later awaits inside it may still be running after
                    // the semaphore is released in "finally" — confirm this overlap is
                    // intended before restructuring.
                    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async() =>
                    {
                        SetupVisualization(previewFrameSize, faces);

                        if (_isProcessing)
                        {
                            return;
                        }

                        var emotions = await CaptureEmotionAsync();
                        if (emotions.IsNullOrEmpty() == false)
                        {
                            // Pick the first non-empty result as the dominant emotion.
                            var mostProbable =
                                emotions.ToResults()
                                .Where(result => result != Result.Empty)
                                .FirstOrDefault();

                            if (mostProbable == null)
                            {
                                _messageLabel.Text = string.Empty;
                                _emoticon.Text     = string.Empty;
                            }
                            else
                            {
                                _emoticon.Text = Emoticons.From(mostProbable.Emotion);

                                // Re-roll until the message differs from the one on
                                // screen. NOTE(review): this spins forever if the
                                // emotion has only one message — verify message lists
                                // always contain at least two entries.
                                var current = _messageLabel.Text;
                                var message = EmotionMessages.Messages[mostProbable.Emotion].RandomElement();
                                while (current == message)
                                {
                                    message = EmotionMessages.Messages[mostProbable.Emotion].RandomElement();
                                }
                                _messageLabel.Text = message;
                                await _speechEngine.SpeakAsync(message, _speaker);

                                // Stop streaming after a fixed number of captures.
                                ++_captureCounter;
                                if (_captureCounter >= MaxCaptureBeforeReset)
                                {
                                    await ChangeStreamStateAsync(false);
                                }
                            }
                        }
                    });
                }
            }
            catch (Exception ex) when(DebugHelper.IsHandled <MainPage>(ex))
            {
            }
            finally
            {
                _frameProcessingSemaphore.Release();
            }
        }
        /// <summary>
        /// Timer callback: runs local face tracking on a preview frame, drives the
        /// Train/Yes/No UI state, and — when exactly one face is visible — calls the
        /// Face API to identify it and optionally saves training pictures.
        /// </summary>
        private async void ProcessVideoFrame(ThreadPoolTimer timer)
        {
            if (!frameProcessingSimaphore.Wait(0))
            {
                // We are already doing something
                return;
            }

            try
            {
                IEnumerable <DetectedFace> faces = null;

                // FaceTracker requires NV12 pixel data.
                const BitmapPixelFormat inputPixelFormat = BitmapPixelFormat.Nv12;

                Face[] globalFaces = null;

                using (var previewFrame = new VideoFrame(inputPixelFormat, (int)videoProperties.Width, (int)videoProperties.Height))
                {
                    await mediaCapture.GetPreviewFrameAsync(previewFrame);

                    if (FaceTracker.IsBitmapPixelFormatSupported(previewFrame.SoftwareBitmap.BitmapPixelFormat))
                    {
                        faces = await faceTracker.ProcessNextFrameAsync(previewFrame);

                        if (!facesExistInFrame)
                        {
                            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                            {
                                // Enable the Train feature and disable the other buttons
                                PageYes.Visibility = Visibility.Collapsed;
                                PageNo.Visibility  = Visibility.Collapsed;
                                TrainMe.Visibility = Visibility.Visible;
                            });
                        }

                        if (faces.Any())
                        {
                            if (!facesExistInFrame)
                            {
                                facesExistInFrame = true;

                                await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                                {
                                    // Enable the Yes/No buttons.  Disable the Train Button
                                    PageYes.Visibility = Visibility.Visible;
                                    PageNo.Visibility  = Visibility.Visible;
                                    TrainMe.Visibility = Visibility.Collapsed;
                                });

                                // BUGFIX: corrected the "Yse" typo in the user-facing message.
                                await ShowMessage("Will you help me?  If so, make sure I can see you face and click \"Yes\"", 1);
                            }

                            if (faces.Count() > 1)
                            {
                                // BUGFIX: the original message said the opposite of what
                                // this branch means — identification is only possible
                                // with a single visible face.
                                await ShowMessage("Can only identify when a single face is visible.");

                                await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                                {
                                    // Disable the Yes/No buttons.
                                    PageYes.Visibility = Visibility.Collapsed;
                                    PageNo.Visibility  = Visibility.Collapsed;
                                });
                            }
                            else
                            {
                                await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                                {
                                    // Enable the Yes/No Buttons
                                    PageYes.Visibility = Visibility.Visible;
                                    PageNo.Visibility  = Visibility.Visible;
                                    TrainMe.Visibility = Visibility.Collapsed;
                                });

                                // BUGFIX: dispose the capture stream when done with it.
                                using (var captureStream = new MemoryStream())
                                {
                                    await mediaCapture.CapturePhotoToStreamAsync(ImageEncodingProperties.CreatePng(), captureStream.AsRandomAccessStream());

                                    captureStream.AsRandomAccessStream().Seek(0);

                                    // ask the face api what it sees
                                    // See: https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/howtodetectfacesinimage
                                    globalFaces = await faceServiceClient.DetectAsync(captureStream, true, true, requiredFaceAttributes);
                                }

                                // Occasionally save a training picture until we have enough.
                                if (random.Next(3) == 0 && imageNeededCount > 0)
                                {
                                    imageNeededCount--;
                                    SavePicture(mediaCapture);

                                    if (imageNeededCount == 0)
                                    {
                                        await ShowMessage("Ok, you have been recognized...", 1000);

                                        AddToFaceIdList();
                                    }
                                }
                            }

                            // Draw tracking rectangles and identification status on the UI thread.
                            var previewFrameSize = new Size(previewFrame.SoftwareBitmap.PixelWidth, previewFrame.SoftwareBitmap.PixelHeight);
                            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                            {
                                ShowFaceTracking(faces, previewFrameSize);
                                ShowIdentificationiStatus(globalFaces);
                            });
                        }
                        else
                        {
                            facesExistInFrame = false;
                            // reset the stuff because there are no faces to analyze.

                            await ShowMessage(String.Empty);

                            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                            {
                                ShowFaceTracking(faces, new Size());
                            });
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                // Face detection failed for some reason; log it instead of stashing
                // the exception in a dead local as the original did.
                Debug.WriteLine("ProcessVideoFrame failed: " + ex.Message);
            }
            finally
            {
                frameProcessingSimaphore.Release();
            }
        }
Example #11
0
        /// <summary>
        /// Starts barcode scanning: arms a one-shot timer whose callback grabs a
        /// preview frame, decodes it on the UI thread, raises <c>CodeScanned</c> on a
        /// hit, and re-arms itself until the scan cancellation token is cancelled.
        /// </summary>
        public void StartScanning()
        {
            IsStarted = true;

            // Get our preview properties
            var previewProperties = _capture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;

            _scanningTimer = new Timer(async(state) =>
            {
                if (_isProcessingFrame || _capture == null || _capture.CameraStreamState != CameraStreamState.Streaming)
                {
                    _scanningTimer?.Change(DelayBetweenAnalyzingFrames, Timeout.Infinite);
                    return;
                }

                var token = _scanCancellationToken.Token;

                var delay = DelayBetweenAnalyzingFrames;

                _isProcessingFrame = true;

                VideoFrame destFrame = null;
                VideoFrame frame     = null;

                // BUGFIX: the original leaked destFrame/frame and left
                // _isProcessingFrame stuck at true on the early cancellation returns,
                // and leaked destFrame when the capture failed (frame == null). All
                // cleanup now happens in "finally".
                try
                {
                    try
                    {
                        // Setup a frame to use as the input settings
                        destFrame = new VideoFrame(Windows.Graphics.Imaging.BitmapPixelFormat.Bgra8, (int)previewProperties.Width, (int)previewProperties.Height);

                        // Get preview
                        frame = await _capture.GetPreviewFrameAsync(destFrame);
                    }
                    catch (Exception ex)
                    {
                        Debug.WriteLine("GetPreviewFrame Failed: {0}", ex);
                    }

                    if (token.IsCancellationRequested)
                    {
                        // Scanning was cancelled; do not re-arm the timer.
                        return;
                    }

                    Result result = null;

                    // Try decoding the image. Decoding runs on the dispatcher because
                    // WriteableBitmap must be touched on the UI thread; the await
                    // guarantees it has finished before the frames are disposed below.
                    try
                    {
                        if (frame != null)
                        {
                            await _dispatcher.RunAsync(CoreDispatcherPriority.Low, () =>
                            {
                                if (_bitmap == null)
                                {
                                    _bitmap = new WriteableBitmap(frame.SoftwareBitmap.PixelWidth, frame.SoftwareBitmap.PixelHeight);
                                }

                                frame.SoftwareBitmap.CopyToBuffer(_bitmap.PixelBuffer);

                                result = _barcodeReader.Decode(_bitmap);
                            });
                        }
                    }
                    catch (Exception ex)
                    {
                        Debug.WriteLine("Decode Failed: {0}", ex);
                    }

                    if (token.IsCancellationRequested)
                    {
                        return;
                    }

                    if (result != null)
                    {
                        CodeScanned?.Invoke(this, result);

                        // A code was found; use the (shorter) continuous-scan delay.
                        delay = DelayBetweenContinuousScans;
                    }

                    // Re-arm the one-shot timer for the next frame.
                    _scanningTimer?.Change(delay, Timeout.Infinite);
                }
                finally
                {
                    frame?.Dispose();
                    destFrame?.Dispose();
                    _isProcessingFrame = false;
                }
            }, null, InitialDelayBeforeAnalyzingFrames, Timeout.Infinite);
        }
Example #12
0
        /// <summary>
        /// Captures and returns a single frame from the active camera preview.
        /// </summary>
        /// <returns>The captured <see cref="VideoFrame"/>.</returns>
        public async Task <VideoFrame> GetVideoFrameAsync()
        {
            return await mediaCapture.GetPreviewFrameAsync();
        }