/// <summary>
///   Fetches the next hand tracking result, renders its annotation, and draws the camera frame.
/// </summary>
/// <param name="screenController">Controller used to draw annotations and the final frame.</param>
/// <param name="textureFrame">The input camera frame to present.</param>
public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
  // NOTE(review): the result is stored in a field rather than a local — presumably it is
  // read elsewhere between frames; confirm before converting this to a local variable
  // like the sibling RenderOutput implementations do.
  this.handTrackingValue = FetchNextHandTrackingValue();

  RenderAnnotation(screenController, handTrackingValue);
  screenController.DrawScreen(textureFrame);
}
/// <summary>
///   Fetches the next face mesh result, renders its annotation, and draws the camera frame.
/// </summary>
/// <param name="screenController">Controller used to draw annotations and the final frame.</param>
/// <param name="textureFrame">The input camera frame to present.</param>
public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
  RenderAnnotation(screenController, FetchNextFaceMeshValue());
  screenController.DrawScreen(textureFrame);
}
/// <summary>
///   Fetches the next set of output detections, renders their annotation, and draws the camera frame.
/// </summary>
/// <param name="screenController">Controller used to draw annotations and the final frame.</param>
/// <param name="textureFrame">The input camera frame to present.</param>
public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
  RenderAnnotation(screenController, FetchNextOutputDetections());
  screenController.DrawScreen(textureFrame);
}
/// <summary>
///   Fetches the next face detections (if any are present), renders their annotation,
///   and draws the camera frame.
/// </summary>
/// <param name="screenController">Controller used to draw annotations and the final frame.</param>
/// <param name="textureFrame">The input camera frame to present.</param>
public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
  // When the current packet carries no detections, hand RenderAnnotation an empty list
  // instead — presumably so it renders nothing for this frame (confirm against RenderAnnotation).
  var detections = new List<Detection>();

  if (FetchNextFaceDetectionsPresence()) {
    detections = FetchNextFaceDetections();
  }

  RenderAnnotation(screenController, detections);
  screenController.DrawScreen(textureFrame);
}
/// <summary>
///   Fetches the next pose tracking result, renders its annotation, and draws the camera frame.
/// </summary>
/// <param name="screenController">Controller used to draw annotations and the final frame.</param>
/// <param name="textureFrame">The input camera frame to present.</param>
public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
  RenderAnnotation(screenController, FetchNextPoseTrackingValue());
  screenController.DrawScreen(textureFrame);
}
/// <summary>
///   Fetches the next hand tracking result, renders its annotation, updates the gesture
///   model with the same result, and draws the camera frame.
/// </summary>
/// <param name="screenController">Controller used to draw annotations and the final frame.</param>
/// <param name="textureFrame">The input camera frame to present.</param>
public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
  var result = FetchNextHandTrackingValue();

  // Annotation first, then gesture inference — both consume the same tracking result.
  RenderAnnotation(screenController, result);
  UpdateGestureModel(result);

  screenController.DrawScreen(textureFrame);
}
/// <summary>
///   Reads the next GPU output buffer back into a CPU-side <c>ImageFrame</c> and draws it.
///   Falls back to drawing the raw input frame when no output packet is available.
/// </summary>
/// <param name="screenController">Controller used to draw the resulting image.</param>
/// <param name="textureFrame">The input camera frame, drawn as a fallback.</param>
public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
#if UNITY_ANDROID
  // On Android, MediaPipe renders to the texture directly, so there is nothing to read back.
  // NOTE(review): a sibling implementation guards with UNITY_ANDROID && !UNITY_EDITOR —
  // confirm whether this branch should also be skipped in the Editor.
  return;
#else
  if (!outputStreamPoller.Next(outputPacket)) {
    Debug.LogWarning("Failed to fetch an output packet, rendering the input image");
    screenController.DrawScreen(textureFrame);
    return;
  }

  using (var gpuBuffer = outputPacket.Get()) {
    ImageFrame imageFrame = null;

    try {
      gpuHelper.RunInGlContext(() => {
        var gpuBufferFormat = gpuBuffer.Format();
        var sourceTexture = gpuHelper.CreateSourceTexture(gpuBuffer);

        // Allocate a CPU-side frame matching the GPU buffer's format and size.
        imageFrame = new ImageFrame(
          gpuBufferFormat.ImageFormatFor(), gpuBuffer.Width(), gpuBuffer.Height(), ImageFrame.kGlDefaultAlignmentBoundary);

        gpuHelper.BindFramebuffer(sourceTexture);
        var info = gpuBufferFormat.GlTextureInfoFor(0);

        // Copy the bound framebuffer's pixels into the ImageFrame's buffer.
        Gl.ReadPixels(0, 0, sourceTexture.width, sourceTexture.height, info.glFormat, info.glType, imageFrame.MutablePixelData());
        Gl.Flush();

        sourceTexture.Release();

        return Status.Ok(false);
      }).AssertOk();

      if (imageFrame != null) { // always assigned when RunInGlContext succeeded
        screenController.DrawScreen(imageFrame);
      }
    } finally {
      // BUG FIX: the ImageFrame was never disposed, leaking its pixel buffer on every
      // rendered frame. Sibling RenderOutput variants dispose the ImageFrame they draw
      // (via using), so DrawScreen does not take ownership. The finally also covers the
      // case where AssertOk throws after the lambda allocated the frame.
      if (imageFrame != null) {
        imageFrame.Dispose();
      }
    }
  }
#endif
}
/// <summary>
///   Draws the graph's output image, with per-platform strategies: Android renders
///   directly, iOS polls the output stream synchronously, and other platforms consume
///   an image published asynchronously under a lock.
/// </summary>
/// <param name="screenController">Controller used to draw the resulting image.</param>
/// <param name="textureFrame">The input camera frame (unused on most paths here).</param>
public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
#if UNITY_ANDROID && !UNITY_EDITOR
  // MediaPipe renders the result to the screen directly.
#elif UNITY_IOS
  // Pull the next frame from the output stream and draw it; dispose it when done.
  using (var imageFrame = FetchNext(outputStreamPoller, outputPacket, outputStream)) {
    screenController.DrawScreen(imageFrame);
  }
#else
  lock (outputImageLock) {
    // Snapshot the published image; bail out if nothing new has arrived.
    var image = outputImage;

    if (image == null) {
      return;
    }

    screenController.DrawScreen(image);
    image.Dispose();
    outputImage = null; // mark as consumed so the next frame can be published
  }
#endif
}
/// <summary>
///   Draws the most recently published output image, if any, then disposes it and
///   clears the slot so the producer can publish the next one.
/// </summary>
/// <param name="screenController">Controller used to draw the resulting image.</param>
/// <param name="textureFrame">The input camera frame (not drawn by this implementation).</param>
public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
  lock (outputImageLock) {
    if (outputImage != null) {
      screenController.DrawScreen(outputImage);
      outputImage.Dispose();
      outputImage = null;
    }
  }
}
/// <summary>
///   Fetches the next image frame from the output stream, draws it, and disposes it.
/// </summary>
/// <param name="screenController">Controller used to draw the resulting image.</param>
/// <param name="textureFrame">The input camera frame (not drawn by this implementation).</param>
public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
  var imageFrame = FetchNext(outputStreamPoller, outputPacket, outputStream);

  using (imageFrame) {
    screenController.DrawScreen(imageFrame);
  }
}