/// <summary>
/// Unity per-frame update. Drives the texture-reader command state machine:
/// each frame executes at most one pending command stored in m_Command.
/// </summary>
public void Update()
{
    // Process command.
    switch (m_Command)
    {
        case CommandType.None:
            break;
        case CommandType.Create:
        {
            m_TextureReaderApi.Create(ImageFormat, ImageWidth, ImageHeight, ImageSampleMode == SampleMode.KeepAspectRatio);
            // Submit the first frame request as soon as the ARCore camera texture exists.
            if (Frame.CameraImage.Texture != null)
            {
                int textureId = Frame.CameraImage.Texture.GetNativeTexturePtr().ToInt32();
                // SubmitFrame presumably returns a buffer index (or a negative value on
                // failure) — TODO confirm against TextureReaderApi.
                m_ImageBufferIndex = m_TextureReaderApi.SubmitFrame(textureId, k_ARCoreTextureWidth, k_ARCoreTextureHeight);
            }
            // Create is one-shot: clear the command so it doesn't run again next frame.
            m_Command = CommandType.None;
            break;
        }
        case CommandType.Reset:
        {
            // NOTE(review): ReleaseFrame is called here even if m_ImageBufferIndex is -1
            // (no frame in flight) — confirm the native API tolerates an invalid index.
            m_TextureReaderApi.ReleaseFrame(m_ImageBufferIndex);
            // Tear down and re-create the reader with the current settings.
            m_TextureReaderApi.Destroy();
            m_TextureReaderApi.Create(ImageFormat, ImageWidth, ImageHeight, ImageSampleMode == SampleMode.KeepAspectRatio);
            m_ImageBufferIndex = -1;
            // NOTE(review): unlike Create, this case does not clear m_Command; Reset
            // appears to re-run every frame until the command is changed externally —
            // confirm this is intended.
            break;
        }
        case CommandType.ReleasePreviousBuffer:
        {
            // Clear previously used buffer, and submits a new request.
            m_TextureReaderApi.ReleaseFrame(m_ImageBufferIndex);
            m_ImageBufferIndex = -1;
            break;
        }
        case CommandType.ProcessNextFrame:
        {
            if (m_ImageBufferIndex >= 0)
            {
                // Get image pixels from previously submitted request.
                int bufferSize = 0;
                IntPtr pixelBuffer = m_TextureReaderApi.AcquireFrame(m_ImageBufferIndex, ref bufferSize);
                if (pixelBuffer != IntPtr.Zero)
                {
                    // Hand the raw CPU-side pixel data to any registered listeners.
                    OnImageAvailableCallback?.Invoke(ImageFormat, ImageWidth, ImageHeight, pixelBuffer, bufferSize);
                }

                // Release the texture reader internal buffer.
                m_TextureReaderApi.ReleaseFrame(m_ImageBufferIndex);
            }

            break;
        }
        default:
            break;
    }
}
/// <summary>
/// Handles a new CPU image: converts it for display, refreshes the cached
/// display-UV transform when orientation/resolution changes, and forwards the
/// processed buffer to listeners.
/// </summary>
/// <param name="width">Width of the image, in pixels.</param>
/// <param name="height">Height of the image, in pixels.</param>
/// <param name="rowStride">Row stride of the image, in pixels.</param>
/// <param name="pixelBuffer">Pointer to raw image buffer.</param>
private void OnImageAvailable(int width, int height, int rowStride, IntPtr pixelBuffer)
{
    // Skip when the debug background exists but is disabled, or nobody listens.
    if ((DebugBackground != null && !DebugBackground.enabled) || OnImageAvailableCallback == null)
    {
        return;
    }

    // (Re)allocate the texture and CPU buffer when the image size changes.
    // Buffer size of width * height assumes a single-channel (R8) image.
    if (DebugTexture == null || ImageBuffer == null ||
        DebugTexture.width != width || DebugTexture.height != height)
    {
        DebugTexture = new Texture2D(width, height, TextureFormat.R8, false, false);
        ImageBuffer = new byte[width * height];
        CameraImageToDisplayUvTransformation = Frame.CameraImage.ImageDisplayUvs;
    }

    // BUG FIX: the original compared with '< Delta', which refreshed the cache
    // on every frame where the screen size had NOT changed and never when it
    // actually changed. A change is detected when the difference EXCEEDS Delta
    // (the sibling _OnImageAvailable uses '!=' for the same check).
    if (CachedOrientation != Screen.orientation ||
        Mathf.Abs(CachedScreenDimensions.x - Screen.width) > Delta ||
        Mathf.Abs(CachedScreenDimensions.y - Screen.height) > Delta)
    {
        CameraImageToDisplayUvTransformation = Frame.CameraImage.ImageDisplayUvs;
        CachedOrientation = Screen.orientation;
        CachedScreenDimensions = new Vector2(Screen.width, Screen.height);
    }

    if (ImageProcessor.ProcessImage(ImageBuffer, pixelBuffer, width, height, rowStride))
    {
        // Push the processed pixels to the GPU texture and notify listeners.
        DebugTexture.LoadRawTextureData(ImageBuffer);
        DebugTexture.Apply();
        OnImageAvailableCallback.Invoke(ImageBuffer, width, height);

        #region RenderCameraStreem
        if (DebugBackground != null)
        {
            DebugBackground.material.SetTexture("_ImageTex", DebugTexture);

            const string TOP_LEFT_RIGHT = "_UvTopLeftRight";
            const string BOTTOM_LEFT_RIGHT = "_UvBottomLeftRight";

            DebugBackground.material.SetVector(TOP_LEFT_RIGHT, new Vector4(
                CameraImageToDisplayUvTransformation.TopLeft.x,
                CameraImageToDisplayUvTransformation.TopLeft.y,
                CameraImageToDisplayUvTransformation.TopRight.x,
                CameraImageToDisplayUvTransformation.TopRight.y));
            DebugBackground.material.SetVector(BOTTOM_LEFT_RIGHT, new Vector4(
                CameraImageToDisplayUvTransformation.BottomLeft.x,
                CameraImageToDisplayUvTransformation.BottomLeft.y,
                CameraImageToDisplayUvTransformation.BottomRight.x,
                CameraImageToDisplayUvTransformation.BottomRight.y));
        }
        #endregion
    }
}
/// <summary>
/// Handles a new CPU image: runs edge detection on the raw pixels, uploads the
/// result to a display texture, and notifies listeners with the edge buffer.
/// </summary>
/// <param name="width">Width of the image, in pixels.</param>
/// <param name="height">Height of the image, in pixels.</param>
/// <param name="rowStride">Row stride of the image, in pixels.</param>
/// <param name="pixelBuffer">Pointer to raw image buffer.</param>
/// <param name="bufferSize">The size of the image buffer, in bytes.</param>
private void _OnImageAvailable(int width, int height, int rowStride, IntPtr pixelBuffer, int bufferSize)
{
    // Nothing to render while the background image is disabled.
    if (!EdgeDetectionBackgroundImage.enabled)
    {
        return;
    }

    bool needsRealloc = m_EdgeDetectionBackgroundTexture == null
        || m_EdgeDetectionResultImage == null
        || m_EdgeDetectionBackgroundTexture.width != width
        || m_EdgeDetectionBackgroundTexture.height != height;

    // Allocate (or re-allocate on size change) the texture and result buffer.
    if (needsRealloc)
    {
        m_EdgeDetectionBackgroundTexture = new Texture2D(width, height, TextureFormat.R8, false, false);
        m_EdgeDetectionResultImage = new byte[width * height];
        m_CameraImageToDisplayUvTransformation = Frame.CameraImage.ImageDisplayUvs;
    }

    bool displayChanged = m_CachedOrientation != Screen.orientation
        || m_CachedScreenDimensions.x != Screen.width
        || m_CachedScreenDimensions.y != Screen.height;

    // Re-cache the display UV transform whenever orientation or resolution changes.
    if (displayChanged)
    {
        m_CameraImageToDisplayUvTransformation = Frame.CameraImage.ImageDisplayUvs;
        m_CachedOrientation = Screen.orientation;
        m_CachedScreenDimensions = new Vector2(Screen.width, Screen.height);
    }

    // Detect edges within the image; bail out if detection produced nothing.
    if (!EdgeDetector.Detect2(m_EdgeDetectionResultImage, pixelBuffer, width, height, rowStride))
    {
        return;
    }

    // Update the rendering texture with the edge image and notify listeners.
    m_EdgeDetectionBackgroundTexture.LoadRawTextureData(m_EdgeDetectionResultImage);
    m_EdgeDetectionBackgroundTexture.Apply();
    OnImageAvailableCallback?.Invoke(m_EdgeDetectionResultImage, width, height);

    var material = EdgeDetectionBackgroundImage.material;
    material.SetTexture("_ImageTex", m_EdgeDetectionBackgroundTexture);

    const string TOP_LEFT_RIGHT = "_UvTopLeftRight";
    const string BOTTOM_LEFT_RIGHT = "_UvBottomLeftRight";
    var uv = m_CameraImageToDisplayUvTransformation;

    material.SetVector(TOP_LEFT_RIGHT,
        new Vector4(uv.TopLeft.x, uv.TopLeft.y, uv.TopRight.x, uv.TopRight.y));
    material.SetVector(BOTTOM_LEFT_RIGHT,
        new Vector4(uv.BottomLeft.x, uv.BottomLeft.y, uv.BottomRight.x, uv.BottomRight.y));
}
/// <summary>
/// Coroutine: captures a single image from the ARCore camera stream by
/// submitting the current camera texture to the texture reader, waiting one
/// frame for the GPU readback, then delivering the pixels to listeners.
/// </summary>
private IEnumerator _CaptureImage()
{
    m_TextureReaderApi.Create(ImageFormat, ImageWidth, ImageHeight, ImageSampleMode == SampleMode.KeepAspectRatio);

    // Submit the current ARCore camera texture for CPU readback.
    if (Frame.CameraImage.Texture != null)
    {
        int textureId = Frame.CameraImage.Texture.GetNativeTexturePtr().ToInt32();
        m_ImageBufferIndex = m_TextureReaderApi.SubmitFrame(textureId, k_ARCoreTextureWidth, k_ARCoreTextureHeight);
    }

    // Give the GPU a frame to complete the readback before acquiring.
    yield return new WaitForEndOfFrame();

    if (m_ImageBufferIndex >= 0)
    {
        // Get image pixels from previously submitted request.
        int bufferSize = 0;
        IntPtr pixelBuffer = m_TextureReaderApi.AcquireFrame(m_ImageBufferIndex, ref bufferSize);
        if (pixelBuffer != IntPtr.Zero)
        {
            OnImageAvailableCallback?.Invoke(ImageFormat, ImageWidth, ImageHeight, pixelBuffer, bufferSize);
        }

        // BUG FIX: the original never released the acquired frame, leaking the
        // texture reader's internal buffer on every capture. Mirror the
        // ProcessNextFrame path in Update(): release after the callback and
        // clear the index so stale handles are never reused.
        m_TextureReaderApi.ReleaseFrame(m_ImageBufferIndex);
        m_ImageBufferIndex = -1;
    }
}