/// <summary>
/// Event handler invoked on each received camera frame. Offloads <see cref="ProcessFrameSync"/>
/// to the thread pool and drops any frame that arrives while a previous frame is still being processed.
/// </summary>
/// <param name="sender">The camera service that raised the event (unused).</param>
/// <param name="e">Event arguments carrying the received <see cref="CameraFrame"/>.</param>
private void CameraServiceOnFrameArrivedAsync(object sender, FrameArrivedEventArgs e)
{
    // Drop the frame if the previous one is still in flight.
    // NOTE(review): this check-then-set is not atomic; if frames can arrive on multiple
    // threads concurrently, consider Interlocked.CompareExchange on an int flag instead.
    if (IsProcessingFrame)
    {
        return;
    }

    IsProcessingFrame = true; // the original assigned this twice; once is sufficient.
    CameraFrame frame = e.Frame;

    // Reset the flag when the background processing completes, whether it succeeded or faulted.
    Task.Run(() => ProcessFrameSync(frame))
        .ContinueWith(_ => IsProcessingFrame = false);
}
/// <summary>
/// Event handler invoked on each received camera frame. Processes the frame synchronously
/// on the calling thread and drops any frame that arrives while a previous frame is still
/// being processed.
/// </summary>
/// <param name="sender">The camera service that raised the event (unused).</param>
/// <param name="e">Event arguments carrying the received <see cref="CameraFrame"/>.</param>
private void CameraServiceOnFrameArrivedSync(object sender, FrameArrivedEventArgs e)
{
    // Drop the frame if the previous one is still being processed.
    if (IsProcessingFrame)
    {
        return;
    }

    IsProcessingFrame = true;
    try
    {
        CameraFrame frame = e.Frame;
        ProcessFrameSync(frame);
    }
    finally
    {
        // FIX: the original reset the flag only on the success path; an exception from
        // ProcessFrameSync would leave IsProcessingFrame stuck at true and silently
        // drop every subsequent frame.
        IsProcessingFrame = false;
    }
}
/// <summary>
/// Processes the received photo frame: copies the raw NV12 buffer into an OpenCV Mat,
/// converts it to grayscale if requested, raises <see cref="FrameArrived"/> with the
/// resulting <see cref="CameraFrame"/>, and queues the next photo request.
/// </summary>
/// <param name="result">Capture result; processing is skipped on <see cref="PhotoCapture.CaptureResultType.UnknownError"/>.</param>
/// <param name="photoCaptureFrame">The captured photo frame; ignored when null.</param>
private void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    // Stop the capture loop once shutdown has been signalled.
    if (_stopped?.Task != null)
    {
        return;
    }

    if (result.resultType == PhotoCapture.CaptureResultType.UnknownError)
    {
        return;
    }

    if (photoCaptureFrame == null)
    {
        return;
    }

    // Luminance (grayscale) of the NV12 format requires image height, chrominance is stored
    // in half resolution, hence height * 3 / 2 rows of single-channel bytes.
    // <see href="https://docs.microsoft.com/en-us/windows/win32/medfound/recommended-8-bit-yuv-formats-for-video-rendering#nv12"/>.
    Size size = new Size(FrameWidth, (double)FrameHeight * 3 / 2);
    _image = new Mat(size, CvType.CV_8UC1);

    // Copy the raw NV12 bytes into the Mat. (photoCaptureFrame is known non-null here,
    // so the original's redundant null-conditional accesses were removed.)
    List<byte> imageBuffer = new List<byte>();
    photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBuffer);
    MatUtils.copyToMat(imageBuffer.ToArray(), _image);

    if (_format == ColorFormat.Grayscale)
    {
        Imgproc.cvtColor(_image, _image, Imgproc.COLOR_YUV2GRAY_NV12);
    }

    // Pose and projection fall back to identity when the platform cannot provide them.
    Matrix4x4 cameraToWorldMatrix = Matrix4x4.identity;
    photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
    CameraExtrinsic extrinsic = new CameraExtrinsic(cameraToWorldMatrix);

    Matrix4x4 projectionMatrix = Matrix4x4.identity;
    photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);
    CameraIntrinsic intrinsic = new CameraIntrinsic(projectionMatrix);

    CameraFrame cameraFrame = new CameraFrame(_image, intrinsic, extrinsic, FrameWidth, FrameHeight, FrameCount++, _format);
    FrameArrivedEventArgs args = new FrameArrivedEventArgs(cameraFrame);
    FrameArrived?.Invoke(this, args);

    // Keep the capture loop running by requesting the next photo.
    _photoCaptureObject?.TakePhotoAsync(OnCapturedPhotoToMemory);
}
/// <summary>
/// Invoked on each received video frame. Extracts the image according to the
/// <see cref="ColorFormat"/> and invokes the <see cref="FrameArrived"/> event containing
/// a <see cref="CameraFrame"/>.
/// </summary>
/// <param name="sender">The frame reader that produced the frame.</param>
/// <param name="args">Frame-arrived event arguments.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="sender"/> or <paramref name="args"/> is null.</exception>
private unsafe void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
    if (sender == null)
    {
        throw new ArgumentNullException(nameof(sender));
    }

    if (args == null)
    {
        throw new ArgumentNullException(nameof(args));
    }

    using (MediaFrameReference frame = sender.TryAcquireLatestFrame())
    {
        if (frame == null)
        {
            return;
        }

        SoftwareBitmap originalSoftwareBitmap = frame.VideoMediaFrame?.SoftwareBitmap;
        if (originalSoftwareBitmap == null)
        {
            _logger.LogWarning("Received frame without image.");
            return;
        }

        // FIX: the original disposed the bitmap only on the success path; wrapping it in a
        // using block guarantees disposal even when processing below throws.
        using (originalSoftwareBitmap)
        {
            CameraExtrinsic extrinsic = new CameraExtrinsic(frame.CoordinateSystem, WorldOrigin);
            CameraIntrinsic intrinsic = new CameraIntrinsic(frame.VideoMediaFrame.CameraIntrinsics);

            using (var input = originalSoftwareBitmap.LockBuffer(BitmapBufferAccessMode.Read))
            using (var inputReference = input.CreateReference())
            {
                // Access the raw pixel buffer and copy it into the reusable Mat.
                byte* inputBytes;
                uint inputCapacity;
                ((IMemoryBufferByteAccess)inputReference).GetBuffer(out inputBytes, out inputCapacity);
                MatUtils.copyToMat((IntPtr)inputBytes, _bitmap);

                int thisFrameCount = Interlocked.Increment(ref FrameCount);

                // TODO: Check out of using block
                CameraFrame cameraFrame = new CameraFrame(_bitmap, intrinsic, extrinsic, FrameWidth, FrameHeight, (uint)thisFrameCount, _format);
                FrameArrivedEventArgs eventArgs = new FrameArrivedEventArgs(cameraFrame);
                FrameArrived?.Invoke(this, eventArgs);
            }
        }
    }
}
/// <summary>
/// Unity per-frame update. Reads the latest webcam texture, converts it to I420 YUV,
/// flips the luminance plane vertically, and raises <see cref="FrameArrived"/> with the
/// resulting <see cref="CameraFrame"/>. Frames are skipped while the camera is unavailable,
/// when no new texture data arrived, or when the known grey "no signal" frame is detected.
/// </summary>
public void Update()
{
    if (!_available)
    {
        return;
    }

    if (!_cameraTexture.didUpdateThisFrame)
    {
        return;
    }

    Color32[] pixels32 = _cameraTexture.GetPixels32();

    Utils.setDebugMode(true);
    Mat submat;
    try
    {
        Mat argbMat = new Mat(_targetVideoHeight, _targetVideoWidth, CvType.CV_8UC4);
        MatUtils.copyToMat(pixels32, argbMat);
        if (argbMat.empty())
        {
            return;
        }

        // workaround obs cam: drop frame if grey / empty.
        double[] values = argbMat.get(0, 0);
        if (values[0] == 128 && values[1] == 129 && values[2] == 127 && values[3] == 255)
        {
            return;
        }

        // FIX: I420 output has height * 3 / 2 rows (the original pre-allocated * 2 / 3,
        // which was harmless only because cvtColor reallocates the destination).
        Mat yuvMat = new Mat(_targetVideoHeight * 3 / 2, _targetVideoWidth, CvType.CV_8UC1);
        Imgproc.cvtColor(argbMat, yuvMat, Imgproc.COLOR_BGRA2YUV_I420);

        // Take the luminance plane and flip it vertically (texture rows are bottom-up).
        submat = yuvMat.submat(0, _targetVideoHeight, 0, _targetVideoWidth);
        Core.flip(submat, submat, 0);
    }
    finally
    {
        // FIX: the original skipped this on the early-return paths above, leaving
        // OpenCV debug mode permanently enabled after a dropped frame.
        Utils.setDebugMode(false);
    }

    CameraIntrinsic intrinsic = new CameraIntrinsic();
    CameraExtrinsic extrinsic = new CameraExtrinsic(Matrix4x4.identity);
    CameraFrame frame = new CameraFrame(submat, intrinsic, extrinsic, _targetVideoWidth, _targetVideoHeight, frameCount++, ColorFormat.Unknown);
    FrameArrivedEventArgs args = new FrameArrivedEventArgs(frame);
    FrameArrived?.Invoke(this, args);
}