/// <summary>
/// Native callback invoked when the MR camera delivers a captured frame.
/// Marshals the native output structure into managed image planes and forwards
/// the resulting frame on the native callback thread, then schedules the public
/// event on the main thread.
/// </summary>
/// <param name="info">Native capture info holding the output pointer, frame id and timestamp.</param>
private static void OnFrameCapture(ref FrameCaptureInfoNative info)
{
    OutputNative output = Marshal.PtrToStructure<OutputNative>(info.OutputPtr);
    ulong id = info.Id;
    ulong timeStamp = info.TimeStamp;

    // Only copy pixel data into managed buffers when the main-thread event has subscribers.
    bool copyFrameData = MLMRCamera.OnFrameCapture != null;

    var planes = new MLMRCamera.Frame.ImagePlane[(uint)output.ImagePlanesCount];
    for (int planeIndex = 0; planeIndex < output.ImagePlanesCount; ++planeIndex)
    {
        ImagePlaneInfoNative nativePlane = output.ImagePlanes[planeIndex];

        // Lazily create a circular pool of three byte arrays per plane, sized to the plane's data.
        if (byteArraysBuffers.Count <= planeIndex && copyFrameData)
        {
            byteArraysBuffers.Add(CircularBuffer<byte[]>.Create(
                new byte[nativePlane.Size],
                new byte[nativePlane.Size],
                new byte[nativePlane.Size]));
        }

        planes[planeIndex] = MLMRCamera.Frame.ImagePlane.Create(
            nativePlane.Width,
            nativePlane.Height,
            nativePlane.Stride,
            nativePlane.BytesPerPixel,
            nativePlane.Size,
            nativePlane.Data,
            copyFrameData ? byteArraysBuffers[planeIndex].Get() : null);
    }

    MLMRCamera.Frame frame = MLMRCamera.Frame.Create(id, timeStamp, planes, output.Format);

    // Deliver on the native callback thread first, then dispatch the public event to the main thread.
    OnFrameCapture_NativeCallbackThread?.Invoke(frame);
    MLThreadDispatch.ScheduleMain(() =>
    {
        MLMRCamera.OnFrameCapture?.Invoke(frame);
    });
}
/// <summary>
/// Converts an MR camera frame into a WebRTC RGBA_8888 frame and pushes it to
/// the video sink.
/// </summary>
/// <param name="mrCameraFrame">The frame received from the MR camera.</param>
/// <returns>The task representing the asynchronous push of the converted frame.</returns>
private Task PushRGBFrame(MLMRCamera.Frame mrCameraFrame)
{
    for (int i = 0; i < imagePlanesRGB.Length; i++)
    {
        MLMRCamera.Frame.ImagePlane imagePlane = mrCameraFrame.ImagePlanes[i];
        imagePlanesRGB[i] = MLWebRTC.VideoSink.Frame.ImagePlane.Create(
            imagePlane.Width,
            imagePlane.Height,
            imagePlane.Stride,
            imagePlane.BytesPerPixel,
            imagePlane.Size,
            imagePlane.DataPtr);
    }

    // Timestamp is divided by 1000 (ns -> µs); assumes the sink expects microseconds — TODO confirm against MLWebRTC.
    MLWebRTC.VideoSink.Frame frame = MLWebRTC.VideoSink.Frame.Create(
        mrCameraFrame.Id,
        mrCameraFrame.TimeStampNs / 1000,
        imagePlanesRGB,
        MLWebRTC.VideoSink.Frame.OutputFormat.RGBA_8888);

    // Return the actual push task instead of discarding it and returning
    // Task.CompletedTask: the original fire-and-forget discard made the push's
    // completion and any exceptions unobservable to callers. Callers that
    // ignore the returned Task behave exactly as before.
    return this.PushFrameAsync(frame);
}
/// <summary>
/// Handler for MR camera RGB frame events; forwards the frame to the video sink.
/// </summary>
/// <param name="mrCameraFrame">The captured MR camera frame.</param>
private void OnMLMRCameraFrameRGB(MLMRCamera.Frame mrCameraFrame)
{
    // Fire-and-forget: the returned task is deliberately not awaited here.
    _ = this.PushRGBFrame(mrCameraFrame);
}