public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
  var detections = FetchNextOutputDetections();
  RenderAnnotation(screenController, detections);

  screenController.DrawScreen(textureFrame);
}
public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
  var holisticValue = FetchNextHolisticValue();
  RenderAnnotation(screenController, holisticValue);

  screenController.DrawScreen(textureFrame);
}
public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
  var poseTrackingValue = FetchNextPoseTrackingValue();
  RenderAnnotation(screenController, poseTrackingValue);

  screenController.DrawScreen(textureFrame);
}
public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
  var detections = FetchNextFaceDetectionsPresence() ? FetchNextFaceDetections() : new List<Detection>();
  RenderAnnotation(screenController, detections);

  screenController.DrawScreen(textureFrame);
}
public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
  var handTrackingValue = FetchNextHandTrackingValue();
  RenderAnnotation(screenController, handTrackingValue);
  UpdateGestureModel(handTrackingValue);

  screenController.DrawScreen(textureFrame);
}
public override void RenderOutput(WebCamScreenController screenController, PixelData pixelData) {
  var hairMask = FetchNextHairMask();
  var texture = screenController.GetScreen();

  texture.SetPixels32(pixelData.Colors);
  RenderAnnotation(screenController, hairMask);

  texture.Apply();
}
public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
  var hairMask = FetchNextHairMask();
  var texture = screenController.GetScreen();

  texture.SetPixels32(textureFrame.GetPixels32());
  RenderAnnotation(screenController, hairMask);

  texture.Apply();
}
public override void RenderOutput(WebCamScreenController screenController, PixelData pixelData) {
  var faceMeshValue = FetchNextFaceMeshValue();
  RenderAnnotation(screenController, faceMeshValue);

  var texture = screenController.GetScreen();
  texture.SetPixels32(pixelData.Colors);
  texture.Apply();
}
public override void RenderOutput(WebCamScreenController screenController, PixelData pixelData) {
  var detections = FetchNextOutputDetections();
  RenderAnnotation(screenController, detections);

  var texture = screenController.GetScreen();
  texture.SetPixels32(pixelData.Colors);
  texture.Apply();
}
public override void RenderOutput(WebCamScreenController screenController, Color32[] pixelData) {
  var handTrackingValue = FetchNextHandTrackingValue();
  RenderAnnotation(screenController, handTrackingValue);

  var texture = screenController.GetScreen();
  texture.SetPixels32(pixelData);
  texture.Apply();
}
public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
  lock (outputImageLock) {
    if (outputImage == null) {
      return;
    }

    screenController.DrawScreen(outputImage);
    outputImage.Dispose();
    outputImage = null;
  }
}
public override void RenderOutput(WebCamScreenController screenController, Color32[] pixelData) {
  var texture = screenController.GetScreen();

  if (!outputStreamPoller.Next(outputPacket)) {
    Debug.LogWarning("Failed to fetch an output packet, rendering the input image");
    texture.SetPixels32(pixelData);
  } else {
    texture.SetPixels32(outputPacket.GetValue().GetColor32s());
  }

  texture.Apply();
}
IEnumerator WaitForCamera(WebCamScreenController webCamScreenController) {
  var waitFrame = MAX_WAIT_FRAME;

  yield return new WaitUntil(() => {
    waitFrame--;
    var isWebCamPlaying = webCamScreenController.isPlaying;

    if (!isWebCamPlaying && waitFrame % 50 == 0) {
      Debug.Log("Waiting for a WebCamDevice");
    }

    return isWebCamPlaying || waitFrame < 0;
  });
}
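// A minimal usage sketch of the coroutine above, assuming it lives in a MonoBehaviour.
// The Start coroutine, the webCamScreenController field, and the error handling are
// assumptions for illustration, not part of the original snippet.
IEnumerator Start() {
  yield return StartCoroutine(WaitForCamera(webCamScreenController));

  if (!webCamScreenController.isPlaying) {
    // The wait timed out without the camera starting.
    Debug.LogError("WebCamDevice is not playing");
    yield break;
  }

  // ... safe to start feeding frames to the graph from here ...
}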
private void RenderAnnotation(WebCamScreenController screenController, HandTrackingValue value) {
  // NOTE: input image is flipped
  /*
   * GetComponent<HandTrackingAnnotationController>().Draw(
   *   screenController.transform, value.HandLandmarkLists, value.Handednesses, value.PalmDetections, value.PalmRects, true);
   */

  // Instead of drawing on the screen's own transform, copy its position and rotation onto
  // the "WebCamCopy" object and swap the Y and Z components of its scale before drawing.
  Transform modified = GameObject.Find("WebCamCopy").transform;
  modified.position = screenController.transform.position;
  modified.rotation = screenController.transform.rotation;
  modified.localScale = new Vector3(
    screenController.transform.localScale.x,
    screenController.transform.localScale.z,
    screenController.transform.localScale.y);

  GetComponent<HandTrackingAnnotationController>().Draw(
    modified, value.HandLandmarkLists, value.Handednesses, value.PalmDetections, value.PalmRects, true);
}
public override void RenderOutput(WebCamScreenController screenController, PixelData pixelData) {
  var texture = screenController.GetScreen();

  if (!outputStreamPoller.Next(outputPacket)) {
    Debug.LogWarning("Failed to fetch an output packet, rendering the input image");
    texture.SetPixels32(pixelData.Colors);
    texture.Apply();
    return;
  }

  ImageFrame outputFrame = null;

  // Read the output GpuBuffer back into an ImageFrame on the GL context.
  var status = gpuHelper.RunInGlContext(() => {
    var gpuFrame = outputPacket.Get();
    var gpuFrameFormat = gpuFrame.Format();
    var sourceTexture = gpuHelper.CreateSourceTexture(gpuFrame);

    outputFrame = new ImageFrame(
      gpuFrameFormat.ImageFormatFor(), gpuFrame.Width(), gpuFrame.Height(), ImageFrame.kGlDefaultAlignmentBoundary);

    gpuHelper.BindFramebuffer(sourceTexture);
    var info = gpuFrameFormat.GlTextureInfoFor(0);

    Gl.ReadPixels(0, 0, sourceTexture.width, sourceTexture.height, info.glFormat, info.glType, outputFrame.MutablePixelData());
    Gl.Flush();

    sourceTexture.Release();

    return Status.Ok(false);
  });

  if (status.ok) {
    texture.SetPixels32(outputFrame.GetColor32s());
  } else {
    // Fall back to rendering the input image if the readback failed.
    Debug.LogError(status.ToString());
    texture.SetPixels32(pixelData.Colors);
  }

  texture.Apply();
}
public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
#if UNITY_ANDROID
  // MediaPipe renders to the texture directly.
  return;
#else
  if (!outputStreamPoller.Next(outputPacket)) {
    Debug.LogWarning("Failed to fetch an output packet, rendering the input image");
    screenController.DrawScreen(textureFrame);
    return;
  }

  using (var gpuBuffer = outputPacket.Get()) {
    ImageFrame imageFrame = null;

    // Read the output GpuBuffer back into an ImageFrame on the GL context.
    gpuHelper.RunInGlContext(() => {
      var gpuBufferFormat = gpuBuffer.Format();
      var sourceTexture = gpuHelper.CreateSourceTexture(gpuBuffer);

      imageFrame = new ImageFrame(
        gpuBufferFormat.ImageFormatFor(), gpuBuffer.Width(), gpuBuffer.Height(), ImageFrame.kGlDefaultAlignmentBoundary);

      gpuHelper.BindFramebuffer(sourceTexture);
      var info = gpuBufferFormat.GlTextureInfoFor(0);

      Gl.ReadPixels(0, 0, sourceTexture.width, sourceTexture.height, info.glFormat, info.glType, imageFrame.MutablePixelData());
      Gl.Flush();

      sourceTexture.Release();

      return Status.Ok(false);
    }).AssertOk();

    if (imageFrame != null) { // always true
      screenController.DrawScreen(imageFrame);
    }
  }
#endif
}
public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
#if UNITY_ANDROID && !UNITY_EDITOR
  // MediaPipe renders the result to the screen directly.
#elif UNITY_IOS
  using (var imageFrame = FetchNext(outputStreamPoller, outputPacket, outputStream)) {
    screenController.DrawScreen(imageFrame);
  }
#else
  lock (outputImageLock) {
    if (outputImage == null) {
      return;
    }

    screenController.DrawScreen(outputImage);
    outputImage.Dispose();
    outputImage = null;
  }
#endif
}
private void RenderAnnotation(WebCamScreenController screenController, PoseTrackingValue value) {
  // NOTE: input image is flipped
  GetComponent<PoseTrackingAnnotationController>().Draw(screenController.transform, value.PoseLandmarkList, value.PoseDetection, true);
}
private void RenderAnnotation(WebCamScreenController screenController, FaceMeshValue value) {
  // NOTE: input image is flipped
  GetComponent<FaceMeshAnnotationController>().Draw(
    screenController.transform, value.MultiFaceLandmarks, value.FaceRectsFromLandmarks, value.FaceDetections, true);
}
public abstract void RenderOutput(WebCamScreenController screenController, PixelData pixelData);
public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
  using (var imageFrame = FetchNext(outputStreamPoller, outputPacket, outputStream)) {
    screenController.DrawScreen(imageFrame);
  }
}
private void RenderAnnotation(WebCamScreenController screenController, HandTrackingValue value) {
  // NOTE: input image is flipped
  annotation.GetComponent<HandTrackingAnnotationController>().Draw(
    screenController.transform, value.Handedness, value.HandLandmarkList, value.HandRect, value.PalmDetections, true);
}
private void RenderAnnotation(WebCamScreenController screenController, List<Detection> detections) {
  // NOTE: input image is flipped
  GetComponent<DetectionListAnnotationController>().Draw(screenController.transform, detections, true);
}
private void RenderAnnotation(WebCamScreenController screenController, HandTrackingValue value) {
  // NOTE: input image is flipped
  GetComponent<IDrawableHandGraph>().Draw(
    screenController.transform, value.HandLandmarkLists, value.Handednesses, value.PalmDetections, value.PalmRects, true);
}
public abstract void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame);
private void RenderAnnotation(WebCamScreenController screenController, ImageFrame hairMask) {
  // NOTE: input image is flipped
  // NOTE: Unity's Color channels are 0..1 floats, so Color.blue would be the idiomatic value here;
  //       (0, 0, 255) still renders as blue.
  GetComponent<MaskAnnotationController>().Draw(screenController.GetScreen(), hairMask, new Color(0, 0, 255), true);
}
private void RenderAnnotation(WebCamScreenController screenController, IrisTrackingValue value) {
  // NOTE: input image is flipped
  GetComponent<IrisTrackingAnnotationController>().Draw(
    screenController.transform, value.FaceLandmarksWithIris, value.FaceRect, value.FaceDetections, true);
}
private void RenderAnnotation(WebCamScreenController screenController, MultiHandTrackingValue value) {
  // NOTE: input image is flipped
  GetComponent<MultiHandTrackingAnnotationController>().Draw(
    screenController.transform, value.MultiHandLandmarks, value.MultiPalmDetections, value.MultiPalmRects, true);
}
public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
  // MediaPipe renders to the texture directly.
  return;
}