// Main lifecycle coroutine for the holistic-tracking scene: start the image
// source, wait for the graph to initialize, register output listeners (async
// mode only), then pump one TextureFrame into the graph per rendered frame.
IEnumerator Run()
{
  var initRequest = graphRunner.WaitForInit();
  var source = ImageSourceProvider.imageSource;

  yield return source.Play();

  if (!source.isPrepared)
  {
    Logger.LogError(TAG, "Failed to start ImageSource, exiting...");
    yield break;
  }

  // NOTE: The screen will be resized later, keeping the aspect ratio.
  SetupScreen(screen, source);
  screen.texture = source.GetCurrentTexture();
  // Counter-rotate the world annotation area so it lines up with the (possibly rotated) input image.
  worldAnnotationArea.localEulerAngles = source.rotation.Reverse().GetEulerAngles();

  Logger.LogInfo(TAG, $"Model Complexity = {modelComplexity}");
  Logger.LogInfo(TAG, $"Smooth Landmarks = {smoothLandmarks}");
  Logger.LogInfo(TAG, $"Detect Iris = {detectIris}");
  Logger.LogInfo(TAG, $"Timeout Millisec = {timeoutMillisec}");
  Logger.LogInfo(TAG, $"Running Mode = {runningMode}");

  yield return initRequest;
  if (initRequest.isError)
  {
    Logger.LogError(TAG, initRequest.error);
    yield break;
  }

  if (runningMode == RunningMode.Async)
  {
    // Async mode: results are delivered through these output-stream callbacks.
    graphRunner.OnPoseDetectionOutput.AddListener(OnPoseDetectionOutput);
    graphRunner.OnFaceLandmarksOutput.AddListener(OnFaceLandmarksOutput);
    graphRunner.OnPoseLandmarksOutput.AddListener(OnPoseLandmarksOutput);
    graphRunner.OnLeftHandLandmarksOutput.AddListener(OnLeftHandLandmarksOutput);
    graphRunner.OnRightHandLandmarksOutput.AddListener(OnRightHandLandmarksOutput);
    graphRunner.OnLeftIrisLandmarksOutput.AddListener(OnLeftIrisLandmarksOutput);
    graphRunner.OnRightIrisLandmarksOutput.AddListener(OnRightIrisLandmarksOutput);
    graphRunner.OnPoseWorldLandmarksOutput.AddListener(OnPoseWorldLandmarksOutput);
    graphRunner.OnPoseRoiOutput.AddListener(OnPoseRoiOutput);
    graphRunner.StartRunAsync(source).AssertOk();
  }
  else
  {
    graphRunner.StartRun(source).AssertOk();
  }

  // Use RGBA32 as the input format.
  // TODO: When using GpuBuffer, MediaPipe assumes that the input format is BGRA, so the following code must be fixed.
  textureFramePool.ResizeTexture(source.textureWidth, source.textureHeight, TextureFormat.RGBA32);

  SetupAnnotationController(poseDetectionAnnotationController, source);
  SetupAnnotationController(holisticAnnotationController, source);
  SetupAnnotationController(poseWorldLandmarksAnnotationController, source);
  SetupAnnotationController(poseRoiAnnotationController, source);

  while (true)
  {
    yield return new WaitWhile(() => isPaused);

    var frameRequest = textureFramePool.WaitForNextTextureFrame();
    yield return frameRequest;
    var frame = frameRequest.result;

    // Copy current image to TextureFrame
    ReadFromImageSource(frame, runningMode, graphRunner.configType);

    graphRunner.AddTextureFrameToInputStream(frame).AssertOk();

    if (runningMode == RunningMode.Sync)
    {
      // When running synchronously, wait for the outputs here (blocks the main thread).
      var result = graphRunner.FetchNextValue();
      poseDetectionAnnotationController.DrawNow(result.poseDetection);
      holisticAnnotationController.DrawNow(
          result.faceLandmarks, result.poseLandmarks,
          result.leftHandLandmarks, result.rightHandLandmarks,
          result.leftIrisLandmarks, result.rightIrisLandmarks);
      poseWorldLandmarksAnnotationController.DrawNow(result.poseWorldLandmarks);
      poseRoiAnnotationController.DrawNow(result.poseRoi);
    }

    yield return new WaitForEndOfFrame();
  }
}
// Main lifecycle coroutine for the detection scene: start the image source,
// wait for graph initialization, wire the detections listener (async mode),
// then feed camera frames into the graph once per rendered frame.
IEnumerator Run()
{
  var initRequest = graphRunner.WaitForInit();
  var source = ImageSourceProvider.imageSource;

  yield return source.Play();

  if (!source.isPrepared)
  {
    Logger.LogError(TAG, "Failed to start ImageSource, exiting...");
    yield break;
  }

  // NOTE: The screen will be resized later, keeping the aspect ratio.
  SetupScreen(screen, source);
  screen.texture = source.GetCurrentTexture();

  Logger.LogInfo(TAG, $"Running Mode = {runningMode}");

  yield return initRequest;
  if (initRequest.isError)
  {
    Logger.LogError(TAG, initRequest.error);
    yield break;
  }

  if (runningMode == RunningMode.Async)
  {
    // Async mode: detections are delivered through this output-stream callback.
    graphRunner.OnOutputDetectionsOutput.AddListener(OnOutputDetectionsOutput);
    graphRunner.StartRunAsync(source).AssertOk();
  }
  else
  {
    graphRunner.StartRun(source).AssertOk();
  }

  // Use RGBA32 as the input format.
  // TODO: When using GpuBuffer, MediaPipe assumes that the input format is BGRA, so the following code must be fixed.
  textureFramePool.ResizeTexture(source.textureWidth, source.textureHeight, TextureFormat.RGBA32);

  SetupAnnotationController(outputDetectionsAnnotationController, source);

  while (true)
  {
    yield return new WaitWhile(() => isPaused);

    var frameRequest = textureFramePool.WaitForNextTextureFrame();
    yield return frameRequest;
    var frame = frameRequest.result;

    // Copy current image to TextureFrame
    ReadFromImageSource(frame, runningMode, graphRunner.configType);

    graphRunner.AddTextureFrameToInputStream(frame).AssertOk();

    if (runningMode == RunningMode.Sync)
    {
      // When running synchronously, wait for the outputs here (blocks the main thread).
      var detections = graphRunner.FetchNextDetections();
      outputDetectionsAnnotationController.DrawNow(detections);
    }

    yield return new WaitForEndOfFrame();
  }
}
// Main lifecycle coroutine for a scene that renders the graph's OUTPUT image:
// starts the image source, prepares either a GPU-backed external texture
// (OpenGLES config) or a CPU-side Texture2D + pixel buffer for the result,
// then pumps input frames into the graph once per rendered frame.
IEnumerator Run()
{
  var initRequest = graphRunner.WaitForInit();
  var source = ImageSourceProvider.imageSource;

  yield return source.Play();

  if (!source.isPrepared)
  {
    Logger.LogError(TAG, "Failed to start ImageSource, exiting...");
    yield break;
  }

  // NOTE: The screen will be resized later, keeping the aspect ratio.
  screen.rectTransform.sizeDelta = new Vector2(source.textureWidth, source.textureHeight);
  // Counter-rotate the screen so it lines up with the (possibly rotated) input image.
  screen.rectTransform.localEulerAngles = source.rotation.Reverse().GetEulerAngles();

  Logger.LogInfo(TAG, $"Running Mode = {runningMode}");

  // Use RGBA32 as the input format.
  // TODO: When using GpuBuffer, MediaPipe assumes that the input format is BGRA, so the following code must be fixed.
  textureFramePool.ResizeTexture(source.textureWidth, source.textureHeight, TextureFormat.RGBA32);

  // Setup output texture
  if (graphRunner.configType == GraphRunner.ConfigType.OpenGLES)
  {
    var frameRequest = textureFramePool.WaitForNextTextureFrame();
    yield return frameRequest;
    var gpuFrame = frameRequest.result;

    // Exclude from TextureFramePool
    gpuFrame.RemoveAllReleaseListeners();
    graphRunner.SetupOutputPacket(gpuFrame);
    // Display the graph's output buffer directly as an external texture.
    screen.texture = Texture2D.CreateExternalTexture(
        gpuFrame.width, gpuFrame.height, gpuFrame.format, false, false, gpuFrame.GetNativeTexturePtr());
  }
  else
  {
    // Non-GLES path: `outputTexture`/`outputBuffer` here are fields
    // (presumably declared on this class — not visible in this chunk),
    // used to copy the graph's output pixels to the screen each frame.
    outputTexture = new Texture2D(source.textureWidth, source.textureHeight, TextureFormat.RGBA32, false);
    screen.texture = outputTexture;
    outputBuffer = new Color32[source.textureWidth * source.textureHeight];
  }

  yield return initRequest;
  if (initRequest.isError)
  {
    Logger.LogError(TAG, initRequest.error);
    yield break;
  }

  if (runningMode == RunningMode.Async)
  {
    // Async mode: the output image is delivered through this callback.
    graphRunner.OnOutput.AddListener(OnOutput);
    graphRunner.StartRunAsync(source).AssertOk();
  }
  else
  {
    graphRunner.StartRun(source).AssertOk();
  }

  while (true)
  {
    yield return new WaitWhile(() => isPaused);

    var frameRequest = textureFramePool.WaitForNextTextureFrame();
    yield return frameRequest;
    var frame = frameRequest.result;

    // Copy current image to TextureFrame
    ReadFromImageSource(frame, runningMode, graphRunner.configType);

    graphRunner.AddTextureFrameToInputStream(frame).AssertOk();

    if (runningMode == RunningMode.Sync && graphRunner.configType != GraphRunner.ConfigType.OpenGLES)
    {
      // When running synchronously, wait for the outputs here (blocks the main thread).
      var output = graphRunner.FetchNextValue();
      DrawNow(output);
    }

    yield return new WaitForEndOfFrame();
  }
}