Example #1
    unsafe void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // Attempt to get the latest camera image. If this method succeeds,
        // it acquires a native resource that must be disposed (see below).
        CameraImage image;

        if (!ARSubsystemManager.cameraSubsystem.TryGetLatestImage(out image))
        {
            return;
        }

        // Display some information about the camera image
        m_ImageInfo.text = string.Format(
            "Image info:\n\twidth: {0}\n\theight: {1}\n\tplaneCount: {2}\n\ttimestamp: {3}\n\tformat: {4}",
            image.width, image.height, image.planeCount, image.timestamp, image.format);

        // Once we have a valid CameraImage, we can access the individual image "planes"
        // (the separate channels in the image). CameraImage.GetPlane provides
        // low-overhead access to this data. This could then be passed to a
        // computer vision algorithm. Here, we will convert the camera image
        // to an RGBA texture and draw it on the screen.

        // Choose an RGBA format.
        // See CameraImage.FormatSupported for a complete list of supported formats.
        var format = TextureFormat.RGBA32;

        if (m_Texture == null || m_Texture.width != image.width || m_Texture.height != image.height)
        {
            m_Texture = new Texture2D(image.width, image.height, format, false);
        }

        // Convert the image to format, flipping the image across the Y axis.
        // We can also get a sub rectangle, but we'll get the full image here.
        var conversionParams = new CameraImageConversionParams(image, format, CameraImageTransformation.MirrorY);

        // Texture2D lets us write directly to the raw texture data,
        // so we can do the conversion in place without making any copies.
        var rawTextureData = m_Texture.GetRawTextureData<byte>();

        try
        {
            image.Convert(conversionParams, new IntPtr(rawTextureData.GetUnsafePtr()), rawTextureData.Length);
        }
        finally
        {
            // We must dispose of the CameraImage after we're finished
            // with it to avoid leaking native resources.
            image.Dispose();
        }

        // Apply the updated texture data to our texture
        m_Texture.Apply();

        // Set the RawImage's texture so we can visualize it.
        m_RawImage.texture = m_Texture;
    }
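The handlers in these examples assume that something has subscribed them to the camera subsystem's frame event. A minimal hookup sketch, assuming the preview-era ARSubsystemManager.cameraFrameReceived event and the serialized fields the sample uses (the class name is hypothetical):

    using System;
    using UnityEngine;
    using UnityEngine.UI;
    using UnityEngine.XR.ARFoundation;

    public class CameraImageSample : MonoBehaviour
    {
        [SerializeField] RawImage m_RawImage;
        [SerializeField] Text m_ImageInfo;
        Texture2D m_Texture;

        void OnEnable()
        {
            // Run OnCameraFrameReceived once per camera frame.
            ARSubsystemManager.cameraFrameReceived += OnCameraFrameReceived;
        }

        void OnDisable()
        {
            ARSubsystemManager.cameraFrameReceived -= OnCameraFrameReceived;
        }

        // ... OnCameraFrameReceived from Example #1 goes here ...
    }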
Example #2
    unsafe void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // Attempt to get the latest camera image. If this method succeeds,
        // it acquires a native resource that must be disposed (see below).
        CameraImage image;

        if (!ARSubsystemManager.cameraSubsystem.TryGetLatestImage(out image))
        {
            return;
        }

        // Display some information about the camera image
        m_ImageInfo.text = string.Format(
            "Image info:\n\twidth: {0}\n\theight: {1}\n\tplaneCount: {2}\n\ttimestamp: {3}\n\tformat: {4}",
            image.width, image.height, image.planeCount, image.timestamp, image.format);

        // Choose an RGBA format.
        // See CameraImage.FormatSupported for a complete list of supported formats.
        var format = TextureFormat.RGBA32;

        if (m_Texture == null || m_Texture.width != image.width || m_Texture.height != image.height)
        {
            m_Texture = new Texture2D(image.width, image.height, format, false);
        }

        // Convert the image to the chosen format. No transformation is applied
        // here; we could also mirror across the Y axis or extract a sub rectangle.
        var conversionParams = new CameraImageConversionParams(image, format, CameraImageTransformation.None);

        // Texture2D lets us write directly to the raw texture data,
        // so we can do the conversion in place without making any copies.
        var rawTextureData = m_Texture.GetRawTextureData<byte>();

        try
        {
            image.Convert(conversionParams, new IntPtr(rawTextureData.GetUnsafePtr()), rawTextureData.Length);
        }
        finally
        {
            // We must dispose of the CameraImage after we're finished
            // with it to avoid leaking native resources.
            image.Dispose();
        }

        // Apply the updated texture data to our texture
        m_Texture.Apply();

        // Run TensorFlow inference on the texture
        RunTF(m_Texture);
    }
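RunTF is the sample's own bridge to its TensorFlow model and is not part of ARFoundation. A minimal pre-processing sketch, with the inference call left as a placeholder since it depends entirely on the TensorFlow binding in use (the m_Model name is hypothetical):

    void RunTF(Texture2D texture)
    {
        // Flatten the RGBA texture into a normalized float array (HWC layout),
        // the shape most image models expect.
        Color32[] pixels = texture.GetPixels32();
        var input = new float[pixels.Length * 3];
        for (int i = 0; i < pixels.Length; i++)
        {
            input[i * 3 + 0] = pixels[i].r / 255f;
            input[i * 3 + 1] = pixels[i].g / 255f;
            input[i * 3 + 2] = pixels[i].b / 255f;
        }

        // The actual inference call depends on the TensorFlow binding in use
        // and is omitted here, e.g.: m_Model.Run(input);
    }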
Example #3
        /// <summary>
        /// Gets the image data from ARFoundation, preps it, and drops it into captureTex.
        /// </summary>
        /// <param name="aOnFinished">Gets called when this method is finished with getting the image.</param>
        private void GrabScreen(Action aOnFinished)
        {
            // Grab the latest image from ARFoundation
            CameraImage image;

            if (!ARSubsystemManager.cameraSubsystem.TryGetLatestImage(out image))
            {
                Debug.LogError("[CameraCaptureARFoundation] Could not get latest image!");
                return;
            }

            // Set up resizing parameters
            Vector2Int size             = resolution.AdjustSize(new Vector2Int(image.width, image.height));
            var        conversionParams = new CameraImageConversionParams
            {
                inputRect        = new RectInt(0, 0, image.width, image.height),
                outputDimensions = new Vector2Int(size.x, size.y),
                outputFormat     = TextureFormat.RGB24,
                transformation   = CameraImageTransformation.MirrorY,
            };

            // make sure we have a texture to store the resized image
            if (captureTex == null || captureTex.width != size.x || captureTex.height != size.y)
            {
                if (captureTex != null)
                {
                    Object.Destroy(captureTex);
                }
                captureTex = new Texture2D(size.x, size.y, TextureFormat.RGB24, false);
            }

            // And do the resize!
            image.ConvertAsync(conversionParams, (status, p, data) =>
            {
                if (status == AsyncCameraImageConversionStatus.Ready)
                {
                    captureTex.LoadRawTextureData(data);
                    captureTex.Apply();
                }
                if (status == AsyncCameraImageConversionStatus.Ready || status == AsyncCameraImageConversionStatus.Failed)
                {
                    image.Dispose();
                    aOnFinished();
                }
            });
        }
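resolution.AdjustSize above comes from the surrounding capture code, not from ARFoundation. A plausible sketch, assuming it only ever scales down to fit a configured maximum edge length while preserving aspect ratio (the maxSize field is hypothetical):

    public Vector2Int AdjustSize(Vector2Int imageSize)
    {
        // Scale down (never up) so the longer edge fits within maxSize,
        // keeping the camera image's aspect ratio.
        int longest = Mathf.Max(imageSize.x, imageSize.y);
        float scale = Mathf.Min(1f, maxSize / (float)longest);
        return new Vector2Int(
            Mathf.RoundToInt(imageSize.x * scale),
            Mathf.RoundToInt(imageSize.y * scale));
    }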
Example #4
    unsafe void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // Attempt to get the latest camera image; it holds a native resource
        // that must be disposed when we're done.
        CameraImage image;

        if (!ARSubsystemManager.cameraSubsystem.TryGetLatestImage(out image))
        {
            return;
        }

        // Choose an RGBA format and compute the downsampled output size.
        var format = TextureFormat.RGBA32;
        var outputSize = new Vector2Int(image.width / imageDownSampleFactor, image.height / imageDownSampleFactor);

        // The texture must match the conversion's output dimensions, not the
        // full camera image, or the converted data will only partially fill it.
        if (m_Texture == null || m_Texture.width != outputSize.x || m_Texture.height != outputSize.y)
        {
            m_Texture = new Texture2D(outputSize.x, outputSize.y, format, false);
        }

        // Convert to the chosen format, downsampling to the smaller output size.
        var conversionParams = new CameraImageConversionParams(image, format, CameraImageTransformation.None);
        conversionParams.outputDimensions = outputSize;

        // Convert directly into the texture's raw data to avoid extra copies.
        var rawTextureData = m_Texture.GetRawTextureData<byte>();

        try
        {
            image.Convert(conversionParams, new IntPtr(rawTextureData.GetUnsafePtr()), rawTextureData.Length);
        }
        finally
        {
            // Dispose the CameraImage to avoid leaking native resources.
            image.Dispose();
        }

        m_Texture.Apply();

        // Publish the frame's average color to shaders via a global property.
        Shader.SetGlobalColor(arCameraTextureColorID, AverageColorFromTexture(m_Texture));
    }
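AverageColorFromTexture is not shown in the sample. A minimal sketch, assuming a plain average over every pixel, which stays cheap because the texture above is downsampled before conversion:

    Color AverageColorFromTexture(Texture2D texture)
    {
        // Average the RGB channels across every pixel. This reads the texture
        // back on the CPU, so keep the input small (it is downsampled above).
        Color32[] pixels = texture.GetPixels32();
        float r = 0f, g = 0f, b = 0f;
        for (int i = 0; i < pixels.Length; i++)
        {
            r += pixels[i].r;
            g += pixels[i].g;
            b += pixels[i].b;
        }
        float count = pixels.Length * 255f;
        return new Color(r / count, g / count, b / count, 1f);
    }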
Example #5
 static internal void UnityARCore_cameraImage_createAsyncConversionRequestWithCallback(
     int nativeHandle, CameraImageConversionParams conversionParams,
     XRCameraExtensions.OnImageRequestCompleteDelegate callback, IntPtr context)
 {
     callback(AsyncCameraImageConversionStatus.Disposed, conversionParams, IntPtr.Zero, 0, context);
 }
Example #6
 static internal int UnityARCore_cameraImage_createAsyncConversionRequest(
     int nativeHandle, CameraImageConversionParams conversionParams)
 {
     return 0;
 }
Example #7
 static internal bool UnityARCore_cameraImage_tryConvert(
     int nativeHandle, CameraImageConversionParams conversionParams,
     IntPtr buffer, int bufferLength)
 {
     return false;
 }
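On platforms where ARCore is unavailable, these stubs complete immediately with a Disposed status (or report failure), so callers should treat non-Ready results as routine. A sketch of a callback matching the delegate invocation in Example #5; the MonoPInvokeCallback attribute (from the AOT namespace) is needed if the delegate is invoked from native code under IL2CPP:

    [AOT.MonoPInvokeCallback(typeof(XRCameraExtensions.OnImageRequestCompleteDelegate))]
    static void OnImageRequestComplete(
        AsyncCameraImageConversionStatus status, CameraImageConversionParams conversionParams,
        IntPtr dataPtr, int dataLength, IntPtr context)
    {
        // On unsupported platforms the stub reports Disposed with no data.
        if (status != AsyncCameraImageConversionStatus.Ready)
        {
            Debug.LogWarning("Camera image conversion did not complete: " + status);
            return;
        }

        // Copy the converted pixels out of the native buffer before it is freed.
        var pixels = new byte[dataLength];
        System.Runtime.InteropServices.Marshal.Copy(dataPtr, pixels, 0, dataLength);
        // ... hand pixels off for further processing ...
    }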
Example #8
 static internal extern void UnityARCore_cameraImage_createAsyncConversionRequestWithCallback(
     int nativeHandle, CameraImageConversionParams conversionParams,
     XRCameraExtensions.OnImageRequestCompleteDelegate callback, IntPtr context);
Example #9
 static internal extern bool UnityARKit_cameraImage_tryConvert(
     int nativeHandle, CameraImageConversionParams conversionParams,
     IntPtr buffer, int bufferLength);
Example #10
 static internal extern int UnityARKit_cameraImage_createAsyncConversionRequest(
     int nativeHandle, CameraImageConversionParams conversionParams);
 public void ConvertAsync(int nativeHandle, CameraImageConversionParams conversionParams, XRCameraExtensions.OnImageRequestCompleteDelegate callback, IntPtr context)
 {
     Api.UnityARCore_cameraImage_createAsyncConversionRequestWithCallback(
         nativeHandle, conversionParams, callback, context);
 }
 public int ConvertAsync(int nativeHandle, CameraImageConversionParams conversionParams)
 {
     return Api.UnityARCore_cameraImage_createAsyncConversionRequest(nativeHandle, conversionParams);
 }
 public bool TryConvert(int nativeHandle, CameraImageConversionParams conversionParams, IntPtr destinationBuffer, int bufferLength)
 {
     return Api.UnityARCore_cameraImage_tryConvert(
         nativeHandle, conversionParams, destinationBuffer, bufferLength);
 }
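A sketch of driving the synchronous TryConvert path shown above from managed code, pinning a byte array as the destination buffer (the provider instance and nativeHandle are assumed to come from the surrounding camera extension plumbing):

    unsafe bool TryConvertToBuffer(int nativeHandle, CameraImageConversionParams conversionParams, byte[] destination)
    {
        // Pin the managed array so the native converter can write into it safely.
        fixed (byte* ptr = destination)
        {
            return TryConvert(nativeHandle, conversionParams, new IntPtr(ptr), destination.Length);
        }
    }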