/// <summary>
/// Pulls the latest video frame and runs the FaceTracker on it, updating
/// <c>latestFaces</c> (under <c>@lock</c>) with the bounding boxes of any
/// detected faces. Safe to call every frame; exits early when no frame is
/// available or the pixel format is unsupported by the FaceTracker.
/// </summary>
public void ProcessFrame() {
    MediaFrameReference frame = videoFrameProcessor.GetLatestFrame();
    VideoMediaFrame videoMediaFrame = frame?.VideoMediaFrame;
    if (videoMediaFrame == null) {
        return;
    }

    // Validate that the incoming frame format is compatible with the FaceTracker
    bool isBitmapPixelFormatSupported = videoMediaFrame.SoftwareBitmap != null && FaceTracker.IsBitmapPixelFormatSupported(videoMediaFrame.SoftwareBitmap.BitmapPixelFormat);
    if (!isBitmapPixelFormatSupported) {
        return;
    }

    // Ask the FaceTracker to process this frame asynchronously
    IAsyncOperation<IList<DetectedFace>> processFrameTask = faceTracker.ProcessNextFrameAsync(videoMediaFrame.GetVideoFrame());

    // BUG FIX: the original called processFrameTask.GetResults() immediately,
    // before the operation had completed. Per WinRT semantics that throws
    // InvalidOperationException every time; the exception was swallowed by the
    // catch below, so latestFaces was never actually updated. Since this method
    // must stay synchronous (void), harvest the results from the operation's
    // Completed callback instead of blocking the caller.
    processFrameTask.Completed = (operation, status) => {
        if (status != AsyncStatus.Completed) {
            // Cancelled or errored; nothing to record for this frame.
            return;
        }

        try {
            IList<DetectedFace> faces = operation.GetResults();
            lock (@lock) {
                if (faces.Count == 0) {
                    ++numFramesWithoutFaces;

                    // The FaceTracker might lose track of faces for a few frames, for example,
                    // if the person momentarily turns their head away from the videoFrameProcessor. To smooth out
                    // the tracking, we allow 30 video frames (~1 second) without faces before
                    // we say that we're no longer tracking any faces.
                    if (numFramesWithoutFaces > 30 && latestFaces.Any()) {
                        latestFaces.Clear();
                    }
                } else {
                    numFramesWithoutFaces = 0;
                    latestFaces.Clear();
                    foreach (var face in faces) {
                        latestFaces.Add(face.FaceBox);
                    }
                }
            }
        } catch (Exception e) {
            // The task might be cancelled if the FaceAnalysis failed.
            Debug.LogException(e);
        }
    };
}
/// <summary>
/// Grabs the latest frame from the video processor, runs the FaceTracker on it,
/// and caches the bounding boxes of any detected faces in <c>latestfaces</c>.
/// </summary>
/// <returns>Always 0; the value is unused and kept only for signature compatibility.</returns>
private async Task<int> ProcessCurrentVideoFrame() {
    Debug.WriteLine("\t --> ProcessCurrentFrame called properly!");

    Debug.WriteLine("\t --> calling videoProcessor.GetLatestFrame() !");
    frame = videoProcessor.GetLatestFrame();
    Debug.WriteLine("\t --> videoProcessor.GetLatestFrame() called !");

    if (frame == null) {
        Debug.WriteLine("\t --> last frame was null !");
        return(0);
    }

    // Guard against frames that carry no software bitmap before probing the pixel
    // format (the original dereferenced SoftwareBitmap unconditionally and could NRE),
    // and bail out on unsupported formats instead of falling through to a null
    // this.faces below.
    var bitmap = frame.VideoMediaFrame?.SoftwareBitmap;
    if (bitmap == null || !FaceTracker.IsBitmapPixelFormatSupported(bitmap.BitmapPixelFormat)) {
        Debug.WriteLine("\t--> Format : NOT OK!");
        return(0);
    }

    Debug.WriteLine("\t --> Format: OK!");

    // BUG FIX: the original called GetResults() on the IAsyncOperation immediately,
    // before it had completed, which throws InvalidOperationException. This method
    // is already async, so simply await the operation.
    this.faces = await this.faceTracker.ProcessNextFrameAsync(frame.VideoMediaFrame.GetVideoFrame());
    Debug.WriteLine("\t --> Frame processed!");

    if (this.faces.Count == 0) {
        // Nothing to record; latestfaces keeps its previous contents.
        Debug.WriteLine("No Face detected");
    } else {
        Debug.WriteLine("Face detected");
        latestfaces.Clear();
        foreach (DetectedFace face in faces) {
            Debug.WriteLine("faces size: " + faces.Count.ToString());
            latestfaces.Add(face.FaceBox);
        }

        foreach (BitmapBounds latestface in latestfaces) {
            Debug.WriteLine("faces size: " + latestfaces.Count.ToString());
            Debug.WriteLine("faceX" + latestface.X.ToString());
            Debug.WriteLine("faceY" + latestface.Y.ToString());
        }
    }

    return(0);
}
/// <summary>
/// Timer callback: grabs a preview frame, runs local face tracking, and — when
/// exactly one face is in view — captures a photo and sends it to the Face API
/// for identification. Also toggles the Yes/No/Train buttons to match the
/// current tracking state. Skips the tick if the previous one is still running.
/// (async void is acceptable here because this is a timer event handler.)
/// </summary>
/// <param name="timer">The ThreadPoolTimer that fired this callback (unused).</param>
private async void ProcessVideoFrame(ThreadPoolTimer timer) {
    if (!frameProcessingSimaphore.Wait(0)) {
        // We are already doing something
        return;
    }

    try {
        IEnumerable<DetectedFace> faces = null;
        const BitmapPixelFormat inputPixelFormat = BitmapPixelFormat.Nv12;
        Face[] globalFaces = null;

        using (var previewFrame = new VideoFrame(inputPixelFormat, (int)videoProperties.Width, (int)videoProperties.Height)) {
            await mediaCapture.GetPreviewFrameAsync(previewFrame);

            // BUG FIX: when the format was unsupported, the original fell through to
            // faces.Count() on a null reference (the NRE was silently swallowed).
            // Bail out instead; the semaphore is released in finally.
            if (!FaceTracker.IsBitmapPixelFormatSupported(previewFrame.SoftwareBitmap.BitmapPixelFormat)) {
                return;
            }

            faces = await faceTracker.ProcessNextFrameAsync(previewFrame);

            if (!facesExistInFrame) {
                await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => {
                    // Enable the Train feature and disable the other buttons
                    PageYes.Visibility = Visibility.Collapsed;
                    PageNo.Visibility = Visibility.Collapsed;
                    TrainMe.Visibility = Visibility.Visible;
                });
            }

            if (faces.Any()) {
                if (!facesExistInFrame) {
                    facesExistInFrame = true;
                    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => {
                        // Enable the Yes/No buttons. Disable the Train Button
                        PageYes.Visibility = Visibility.Visible;
                        PageNo.Visibility = Visibility.Visible;
                        TrainMe.Visibility = Visibility.Collapsed;
                    });
                    // FIX: user-facing typos ("you face", "Yse") corrected.
                    await ShowMessage("Will you help me? If so, make sure I can see your face and click \"Yes\"", 1);
                }

                if (faces.Count() > 1) {
                    // FIX: the original message said the opposite of the condition
                    // ("Can only identify when multiple faces are visible") — this
                    // branch fires when MORE than one face is in view and
                    // identification must be suspended.
                    await ShowMessage("Can only identify when a single face is visible.");
                    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => {
                        // Disable the Yes/No buttons.
                        PageYes.Visibility = Visibility.Collapsed;
                        PageNo.Visibility = Visibility.Collapsed;
                    });
                } else {
                    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => {
                        // Enable the Yes/No Buttons
                        PageYes.Visibility = Visibility.Visible;
                        PageNo.Visibility = Visibility.Visible;
                        TrainMe.Visibility = Visibility.Collapsed;
                    });

                    // FIX: dispose the capture stream (was leaked in the original).
                    using (var captureStream = new MemoryStream()) {
                        await mediaCapture.CapturePhotoToStreamAsync(ImageEncodingProperties.CreatePng(), captureStream.AsRandomAccessStream());
                        captureStream.AsRandomAccessStream().Seek(0);

                        // ask the face api what it sees
                        // See: https://docs.microsoft.com/en-us/azure/cognitive-services/face/face-api-how-to-topics/howtodetectfacesinimage
                        globalFaces = await faceServiceClient.DetectAsync(captureStream, true, true, requiredFaceAttributes);
                    }

                    // Occasionally (1 in 3 ticks) save a training picture until the
                    // required number of images has been collected.
                    if (random.Next(3) == 0 && imageNeededCount > 0) {
                        imageNeededCount--;
                        SavePicture(mediaCapture);
                        if (imageNeededCount == 0) {
                            await ShowMessage("Ok, you have been recognized...", 1000);
                            AddToFaceIdList();
                        }
                    }
                }

                var previewFrameSize = new Size(previewFrame.SoftwareBitmap.PixelWidth, previewFrame.SoftwareBitmap.PixelHeight);
                await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => {
                    ShowFaceTracking(faces, previewFrameSize);
                    ShowIdentificationiStatus(globalFaces);
                });
            } else {
                facesExistInFrame = false;

                // reset the stuff because there are no faces to analyze.
                await ShowMessage(String.Empty);
                await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => {
                    ShowFaceTracking(faces, new Size());
                });
            }
        }
    } catch (Exception ex) {
        // Face detection failed for some reason; log it instead of silently
        // discarding (the timer simply tries again on the next tick).
        Debug.WriteLine("ProcessVideoFrame failed: " + ex);
    } finally {
        frameProcessingSimaphore.Release();
    }
}