/// <summary>
/// Timer callback: grabs the latest camera frame, runs face detection on it and,
/// when the number of detected faces changes since the previous frame, encodes the
/// frame, runs it through <c>AnalysisFunction</c> and raises <c>UsersIdentified</c>.
/// Raises <c>NoFaceDetected</c> when no faces are found.
/// </summary>
/// <param name="timer">The pool timer that fired this tick (not used).</param>
/// <remarks>
/// <c>async void</c> is intentional: this is a top-level timer callback.
/// All awaited work is wrapped in try/catch so exceptions cannot escape.
/// </remarks>
public async void ProcessCurrentVideoFrame(ThreadPoolTimer timer)
{
    // Skip this tick entirely if the previous frame is still being processed.
    if (!_frameProcessingSemaphore.Wait(0))
    {
        return;
    }

    SoftwareBitmap currentFrame = null;
    try
    {
        currentFrame = await _camera.GetLatestFrame() as SoftwareBitmap;

        // FaceDetector only accepts certain pixel formats; Nv12 is the usual camera
        // output. Use FaceDetector.GetSupportedBitmapPixelFormats and
        // IsBitmapPixelFormatSupported to determine supported formats dynamically.
        const BitmapPixelFormat faceDetectionPixelFormat = BitmapPixelFormat.Nv12;
        if (currentFrame == null || currentFrame.BitmapPixelFormat != faceDetectionPixelFormat)
        {
            // Nothing to do this tick; cleanup happens in finally.
            return;
        }

        IList<DetectedFace> detectedFaces = await _faceDetector.DetectFacesAsync(currentFrame);
        if (detectedFaces.Count == 0)
        {
            NoFaceDetected?.Invoke(this, null);
        }
        else if (detectedFaces.Count != _detectedFacesInLastFrame)
        {
            // Face count changed: re-run the (expensive) identification analysis.
            OnPreAnalysis?.Invoke(this, null);
            var bytes = await _camera.GetEncodedBytesAsync(currentFrame);
            var output = await AnalysisFunction(bytes);
            UsersIdentified?.Invoke(this, output);
        }

        _detectedFacesInLastFrame = detectedFaces.Count;
    }
    catch (Exception ex)
    {
        // Face tracking failed; log and keep the timer alive.
        Debug.WriteLine(ex);
    }
    finally
    {
        // FIX: the original leaked the frame on the early pixel-format return and
        // disposed it outside any finally. Dispose unconditionally here, then
        // release the gate so the next tick can run.
        currentFrame?.Dispose();
        _frameProcessingSemaphore.Release();
    }
}
/// <summary>
/// Debug/fallback timer callback: instead of a live camera frame, loads the bundled
/// test image <c>Assets/untitled.png</c> from the app package and runs it through
/// <c>AnalysisFunction</c>, raising <c>OnPreAnalysis</c> before and
/// <c>UsersIdentified</c> after the analysis — mirroring the camera overload.
/// </summary>
/// <param name="timer">The timer that fired this tick (not used).</param>
/// <remarks>
/// <c>async void</c> is intentional (top-level timer callback), so every awaited
/// step is guarded: an unhandled exception here would crash the process.
/// </remarks>
public async void ProcessCurrentVideoFrame(Timer timer)
{
    try
    {
        OnPreAnalysis?.Invoke(this, null);

        var assetsFolder = await Package.Current.InstalledLocation.GetFolderAsync(@"Assets");
        // FIX: build the path with Path.Combine instead of hard-coded "/" concatenation.
        var imagePath = Path.Combine(assetsFolder.Path, "untitled.png");
        var imageBytes = File.ReadAllBytes(imagePath);

        var output = await AnalysisFunction(imageBytes);
        UsersIdentified?.Invoke(this, output);
    }
    catch (Exception ex)
    {
        // FIX: the original had no handler; a missing/unreadable asset would have
        // taken down the app. Log and keep the timer alive, consistent with the
        // camera-frame overload.
        Debug.WriteLine(ex);
    }
}