Example #1
    void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // Camera data extraction
        XRCameraImage image;

        if (!cameraManager.TryGetLatestImage(out image))
        {
            Debug.Log("Uh Oh");
            return;
        }

        Vector2            img_dim   = image.dimensions;
        XRCameraImagePlane greyscale = image.GetPlane(0);

        // Process the image here:
        unsafe {
            IntPtr greyPtr = (IntPtr)greyscale.data.GetUnsafePtr();
            ComputerVisionAlgo(greyPtr);
        }

        // Dispose only after the plane data has been consumed; disposing first
        // would invalidate the native buffer backing greyscale.data.
        image.Dispose();

        // Creates 3D object from image processing data
        FindRaycastPoint();
    }
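For context, handlers like this are typically attached to ARCameraManager's frameReceived event. A minimal wiring sketch (the cameraManager field is assumed, as in the handler above):

    void OnEnable()  { cameraManager.frameReceived += OnCameraFrameReceived; }
    void OnDisable() { cameraManager.frameReceived -= OnCameraFrameReceived; }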
Example #2
        public static void GetPlaneData(out byte[] pixels, XRCameraImage image)
        {
            XRCameraImagePlane plane = image.GetPlane(0);             // use the Y plane
            int width = image.width, height = image.height;

            pixels = new byte[width * height];

            if (width == plane.rowStride)
            {
                plane.data.CopyTo(pixels);
            }
            else
            {
                unsafe
                {
                    ulong handle;
                    byte *srcPtr = (byte *)NativeArrayUnsafeUtility.GetUnsafePtr(plane.data);
                    byte *dstPtr = (byte *)UnsafeUtility.PinGCArrayAndGetDataAddress(pixels, out handle);
                    if (width > 0 && height > 0)
                    {
                        UnsafeUtility.MemCpyStride(dstPtr, width, srcPtr, plane.rowStride, width, height);
                    }
                    UnsafeUtility.ReleaseGCObject(handle);
                }
            }
        }
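A minimal call-site sketch for GetPlaneData (the cameraManager reference is assumed). Because the helper copies the Y plane into managed memory, the image can be disposed immediately afterwards:

    XRCameraImage image;
    if (cameraManager.TryGetLatestImage(out image))
    {
        byte[] pixels;
        GetPlaneData(out pixels, image);
        image.Dispose();   // safe: pixels now holds a managed copy of the Y plane
        // hand pixels to a worker thread, a native plugin, etc.
    }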
Example #3
        private IEnumerator Localize()
        {
            XRCameraImage image;

            if (cameraManager.TryGetLatestImage(out image))
            {
                Camera cam = Camera.main;
                m_stats.localizationAttemptCount++;
                Vector3    camPos     = cam.transform.position;
                Quaternion camRot     = cam.transform.rotation;
                Vector4    intrinsics = ARHelper.GetIntrinsics(cameraManager);

                int width  = image.width;
                int height = image.height;

                XRCameraImagePlane plane  = image.GetPlane(0);                // use the Y plane
                byte[]             pixels = new byte[plane.data.Length];
                plane.data.CopyTo(pixels);
                image.Dispose();

                Vector3    pos = Vector3.zero;
                Quaternion rot = Quaternion.identity;

                float startTime = Time.realtimeSinceStartup;

                Task <int> t = Task.Run(() =>
                {
                    return(Immersal.Core.LocalizeImage(out pos, out rot, width, height, ref intrinsics, pixels));
                });

                while (!t.IsCompleted)
                {
                    yield return(null);
                }

                int mapHandle = t.Result;

                if (mapHandle >= 0 && m_MapIdToOffset.ContainsKey(mapHandle))
                {
                    MapOffset mo          = m_MapIdToOffset[mapHandle];
                    float     elapsedTime = Time.realtimeSinceStartup - startTime;
                    Debug.Log(string.Format("Relocalised in {0} seconds", elapsedTime));
                    m_stats.localizationSuccessCount++;
                    Matrix4x4 cloudSpace   = mo.offset * Matrix4x4.TRS(pos, rot, Vector3.one);
                    Matrix4x4 trackerSpace = Matrix4x4.TRS(camPos, camRot, Vector3.one);
                    Matrix4x4 m            = trackerSpace * (cloudSpace.inverse);
                    mo.space.filter.RefinePose(m);
                }

                if (m_debugText != null)
                {
                    m_debugText.text = string.Format("Localization status: {0}/{1}", m_stats.localizationSuccessCount, m_stats.localizationAttemptCount);
                }
                else
                {
                    Debug.Log(string.Format("Localization status: {0}/{1}", m_stats.localizationSuccessCount, m_stats.localizationAttemptCount));
                }
            }
            m_bIsLocalizing = false;
        }
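A note on the wait loop above: polling t.IsCompleted once per frame keeps the coroutine responsive while the native localization call runs on the thread pool. The same wait can be expressed more compactly with Unity's WaitUntil (behaviorally equivalent; a stylistic sketch):

    yield return new WaitUntil(() => t.IsCompleted);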
Example #4
    void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // CAMERA IMAGE HANDLING
        XRCameraImage image;

        if (!m_ARCameraManager.TryGetLatestImage(out image))
        {
            Debug.Log("Uh Oh");
            return; // bail out only when no image is available
        }

        Debug.Log("FRAME");

        XRCameraImagePlane greyscale = image.GetPlane(0);

        if (m_Texture == null || m_cachedWidth != image.width || m_cachedHeight != image.height)
        {
            Debug.Log("Updating Texture Parameters");
            // TODO: Check for orientation
            // Update Cached Values
            m_cachedWidth  = image.width;
            m_cachedHeight = image.height;

            // Make new Texture
            var format = TextureFormat.R8; // CAUSES WHITESCREENING IF RGB24
            m_Texture = new Texture2D(image.width, image.height, format, false);
        }

        int w = image.width;
        int h = image.height;

        // Process the image here:
        unsafe {
            Debug.Log("Processing the image");
            IntPtr greyPtr = (IntPtr)greyscale.data.GetUnsafePtr();
            ComputerVisionAlgo(greyPtr);
            Utils.fastMatToTexture2D(imageMat, m_Texture, true, 0);
        }

        // Dispose only after the plane data has been consumed.
        image.Dispose();

        if (m_CachedOrientation != Screen.orientation)
        {
            Debug.Log("Configuring RawImage");
            m_CachedOrientation = Screen.orientation;
            ConfigureRawImageInSpace(w, h);
        }

        // BuildGreyscaleTexture(out YTexture);
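        // NOTE: Texture2D.Resize leaves the pixel contents undefined, so the
        // Resize call below likely blanks the frame that was just written;
        // sizing the texture once at creation is safer.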

        m_Texture.Resize(Screen.width, Screen.height);

        m_Texture.Apply();
        m_RawImage.texture = (Texture)m_Texture;
        Debug.Log(m_Texture.GetPixel(300, 300));
        Debug.LogFormat("Texture Dimensions: {0} x {1}", m_Texture.width, m_Texture.height);
        // m_RawImage.SetNativeSize();
    }
Example #5
    void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // Camera data extraction
        XRCameraImage image;

        if (!cameraManager.TryGetLatestImage(out image))
        {
            Debug.Log("Uh Oh");
            return;
        }

        Vector2            img_dim   = image.dimensions;
        XRCameraImagePlane greyscale = image.GetPlane(0);

        // Instantiates new m_Texture if necessary
        if (m_Texture == null || m_Texture.width != image.width)
        {
            var format = TextureFormat.RGBA32;
            m_Texture = new Texture2D(image.width, image.height, format, false);
        }

        // Sets orientation of screen if necessary
        if (m_CachedOrientation == null || m_CachedOrientation != Screen.orientation)
        {
            // TODO: Debug why doesn't initiate with ConfigRawimage(). The null isn't triggering here. Print cached Orientation
            m_CachedOrientation = Screen.orientation;
            ConfigureRawImageInSpace(img_dim);
        }

        // Process the image here:
        unsafe {
            IntPtr greyPtr = (IntPtr)greyscale.data.GetUnsafePtr();

            // TOUCH: Cache image on touch
            if (Input.touchCount > 0)
            {
                Touch touch = Input.GetTouch(0);
                if (touch.phase == TouchPhase.Began)
                {
                    // Cache original image
                    Utils.copyToMat(greyPtr, cached_initMat);
                }
            }

            ComputerVisionAlgo(greyPtr);

            // Displays OpenCV Mat as a Texture
            Utils.matToTexture2D(outMat, m_Texture, true, 0);
        }

        // Dispose only after the plane data has been consumed.
        image.Dispose();

        m_RawImage.texture = (Texture)m_Texture;
    }
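A likely cause of the recurring TODO about the null check never firing: if m_CachedOrientation is declared as a plain ScreenOrientation, it is a non-nullable enum, so m_CachedOrientation == null can never be true (the compiler even warns that the comparison is always false). A sketch of the fix, assuming the field is free to change:

    // Nullable, so the very first frame (no cached value yet) triggers configuration
    private ScreenOrientation? m_CachedOrientation = null;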
Example #6
    void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // CAMERA IMAGE HANDLING
        XRCameraImage image;

        if (!cameraManager.TryGetLatestImage(out image))
        {
            Debug.Log("Uh Oh");
            return;
        }

        // Debug.LogFormat("Dimensions: {0}\n\t Format: {1}\n\t Time: {2}\n\t ",
        //     image.dimensions, image.format, image.timestamp);

        Vector2 img_dim = image.dimensions;

        XRCameraImagePlane greyscale = image.GetPlane(0);

        if (m_Texture == null || m_Texture.width != image.width || m_Texture.height != image.height)
        {
            var format = TextureFormat.RGBA32;
            m_Texture = new Texture2D(image.width, image.height, format, false);
        }

        // Process the image here:
        unsafe {
            IntPtr greyPtr = (IntPtr)greyscale.data.GetUnsafePtr();
            ComputerVisionAlgo(greyPtr);
            Utils.matToTexture2D(outMat, m_Texture, true, 0);
        }

        // Dispose only after the plane data has been consumed.
        image.Dispose();

        if (m_CachedOrientation == null || m_CachedOrientation != Screen.orientation)
        {
            m_CachedOrientation = Screen.orientation;
            ConfigureRawImageInSpace(img_dim);
        }

        // Debug.LogFormat("Raw Image Coords: {0}\n Raw Image Scale: {1}",
        //     m_RawImage.transform.position, m_RawImage.transform.localScale);

        m_RawImage.texture = (Texture)m_Texture;
        // Debug.Log(m_Texture.GetPixel(300, 300));
        // Debug.LogFormat("\n Texture Dimensions: {0} x {1}",
        //     m_Texture.width, m_Texture.height);
    }
Example #7
    void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // CAMERA IMAGE HANDLING
        XRCameraImage image;

        if (!cameraManager.TryGetLatestImage(out image))
        {
            Debug.Log("Uh Oh");
            return;
        }

        Vector2 img_dim = image.dimensions;

        XRCameraImagePlane greyscale = image.GetPlane(0);

        if (m_Texture == null || m_Texture.width != image.width)
        {
            var format = TextureFormat.RGBA32;
            m_Texture = new Texture2D(image.width, image.height, format, false);
            e_Texture = new Texture2D(image.width, image.height, format, false);
        }

        // Process the image here:
        unsafe {
            IntPtr greyPtr = (IntPtr)greyscale.data.GetUnsafePtr();
            ComputerVisionAlgo(greyPtr);
            Utils.matToTexture2D(outMat, m_Texture, true, 0);
            Utils.matToTexture2D(erodeMat, e_Texture, false, 0);
        }

        // Dispose only after the plane data has been consumed.
        image.Dispose();

        Debug.Log(m_CachedOrientation);
        if (m_CachedOrientation == null || m_CachedOrientation != Screen.orientation)
        {
            // TODO: Debug why doesn't initiate with ConfigRawimage(). The null isn't triggering here. Print cached Orientation
            m_CachedOrientation = Screen.orientation;
            ConfigureRawImageInSpace(img_dim);
        }

        m_RawImage.texture = (Texture)m_Texture;
        e_RawImage.texture = (Texture)e_Texture;

        // double[] c_data = circMat.get(0, 0);
        // m_ImageInfo.text = string.Format("Circle Count: {0}\n Circle[0]: {1} x {2} -- {3}",
        // circMat.size().width, c_data[0], c_data[1], c_data[2]);
    }
Example #8
    void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // CAMERA IMAGE HANDLING
        XRCameraImage image;

        if (!cameraManager.TryGetLatestImage(out image))
        {
            Debug.Log("Uh Oh");
            return;
        }

        Vector2 img_dim = image.dimensions;

        XRCameraImagePlane greyscale = image.GetPlane(0);

        // Instantiates new m_Texture if necessary
        if (m_Texture == null || m_Texture.width != image.width)
        {
            var format = TextureFormat.RGBA32;
            m_Texture = new Texture2D(image.width, image.height, format, false);
        }

        // Sets orientation if necessary
        if (m_CachedOrientation == null || m_CachedOrientation != Screen.orientation)
        {
            // TODO: Debug why doesn't initiate with ConfigRawimage(). The null isn't triggering here. Print cached Orientation
            m_CachedOrientation = Screen.orientation;
            ConfigureRawImageInSpace(img_dim);
        }

        // Process the image here:
        unsafe {
            IntPtr greyPtr = (IntPtr)greyscale.data.GetUnsafePtr();
            ComputerVisionAlgo(greyPtr);
            Utils.matToTexture2D(outMat, m_Texture, true, 0);
        }

        // Dispose only after the plane data has been consumed.
        image.Dispose();

        m_RawImage.texture = (Texture)m_Texture;

        // Creates 3D object from image processing data
        SendRaycastToPoint();
    }
Example #9
    void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // CAMERA IMAGE HANDLING
        XRCameraImage image;

        if (!cameraManager.TryGetLatestImage(out image))
        {
            Debug.Log("Uh Oh");
            return;
        }

        Vector2 img_dim = image.dimensions;

        XRCameraImagePlane greyscale = image.GetPlane(0);

        if (m_Texture == null || m_Texture.width != image.width || m_Texture.height != image.height)
        {
            var format = TextureFormat.RGBA32;
            m_Texture = new Texture2D(image.width, image.height, format, false);
        }

        // Process the image here:
        unsafe {
            IntPtr greyPtr = (IntPtr)greyscale.data.GetUnsafePtr();
            ComputerVisionAlgo(greyPtr);
            Utils.matToTexture2D(outMat, m_Texture, true, 0);
        }

        // Dispose only after the plane data has been consumed.
        image.Dispose();

        if (m_CachedOrientation == null || m_CachedOrientation != Screen.orientation)
        {
            m_CachedOrientation = Screen.orientation;
            ConfigureRawImageInSpace(img_dim);
        }

        m_RawImage.texture = (Texture)m_Texture;
    }
Example #10
        public void Localize()
        {
            XRCameraImage image;

            if (m_CameraManager.TryGetLatestImage(out image))
            {
                CoroutineJobLocalize j = new CoroutineJobLocalize();
                Camera cam             = Camera.main;
                j.rotation   = cam.transform.rotation;
                j.position   = cam.transform.position;
                j.intrinsics = ARHelper.GetIntrinsics(m_CameraManager);
                j.width      = image.width;
                j.height     = image.height;
                j.stats      = this.stats;
                j.pcr        = this.pcr;

                XRCameraImagePlane plane = image.GetPlane(0);                 // use the Y plane
                j.pixels = new byte[plane.data.Length];
                plane.data.CopyTo(j.pixels);
                jobs.Add(j);
                image.Dispose();
            }
        }
Example #11
        private static object[] extractYUVFromImage(XRCameraImage image)
        {
            // Consider each image plane
            XRCameraImagePlane plane = image.GetPlane(0);
            var yRowStride           = plane.rowStride;
            var y = plane.data;

            XRCameraImagePlane plane2 = image.GetPlane(1);
            var uvRowStride           = plane2.rowStride;
            var uvPixelStride         = plane2.pixelStride;
            var u = plane2.data;

            XRCameraImagePlane plane3 = image.GetPlane(2);
            var v = plane3.data;

            byte[] yDst = new byte[y.Length];
            byte[] uDst = new byte[u.Length];
            byte[] vDst = new byte[v.Length];

            object[] objParams = new object[8];
            NativeArray <byte> .Copy(y, yDst);

            NativeArray <byte> .Copy(u, uDst);

            NativeArray <byte> .Copy(v, vDst);

            objParams[0] = yDst;
            objParams[1] = uDst;
            objParams[2] = vDst;
            objParams[3] = yRowStride;
            objParams[4] = uvRowStride;
            objParams[5] = uvPixelStride;
            objParams[6] = image.width;
            objParams[7] = image.height;

            return(objParams);
        }
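The returned object[] is convenient to hand to a worker thread. For reference, a sketch of consuming these parameters to recover a single RGB pixel (hypothetical helper, not part of the original project; uses the standard YUV420 conversion constants and assumes the stride layout captured above):

    static Color32 GetPixelRGB(object[] p, int x, int y)
    {
        byte[] yBuf = (byte[])p[0];
        byte[] uBuf = (byte[])p[1];
        byte[] vBuf = (byte[])p[2];
        int yRowStride    = (int)p[3];
        int uvRowStride   = (int)p[4];
        int uvPixelStride = (int)p[5];

        // Chroma planes are subsampled 2x2, hence the halved indices
        int Y = yBuf[y * yRowStride + x];
        int uvIndex = (y / 2) * uvRowStride + (x / 2) * uvPixelStride;
        int U = uBuf[uvIndex] - 128;
        int V = vBuf[uvIndex] - 128;

        int r = Mathf.Clamp(Y + (int)(1.403f * V), 0, 255);
        int g = Mathf.Clamp(Y - (int)(0.344f * U) - (int)(0.714f * V), 0, 255);
        int b = Mathf.Clamp(Y + (int)(1.770f * U), 0, 255);
        return new Color32((byte)r, (byte)g, (byte)b, 255);
    }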
Example #12
        private IEnumerator Capture(bool anchor)
        {
            yield return(new WaitForSeconds(0.25f));

            XRCameraImage image;

            if (m_CameraManager.TryGetLatestImage(out image))
            {
                CoroutineJobCapture j = new CoroutineJobCapture();
                j.onConnect         = onConnect;
                j.onFailedToConnect = onFailedToConnect;
                j.server            = this.server;
                j.token             = this.token;
                j.bank   = this.bank;
                j.run    = (int)(this.imageRun & 0xEFFFFFFF);
                j.index  = this.imageIndex++;
                j.anchor = anchor;

                Camera     cam = Camera.main;
                Quaternion _q  = cam.transform.rotation;
                Matrix4x4  r   = Matrix4x4.Rotate(new Quaternion(_q.x, _q.y, -_q.z, -_q.w));
                Vector3    _p  = cam.transform.position;
                Vector3    p   = new Vector3(_p.x, _p.y, -_p.z);
                j.rotation   = r;
                j.position   = p;
                j.intrinsics = ARHelper.GetIntrinsics(m_CameraManager);
                j.width      = image.width;
                j.height     = image.height;

                if (rgbCapture)
                {
                    var conversionParams = new XRCameraImageConversionParams
                    {
                        inputRect        = new RectInt(0, 0, image.width, image.height),
                        outputDimensions = new Vector2Int(image.width, image.height),
                        outputFormat     = TextureFormat.RGB24,
                        transformation   = CameraImageTransformation.None
                    };
                    int size = image.GetConvertedDataSize(conversionParams);
                    j.pixels   = new byte[size];
                    j.channels = 3;
                    GCHandle bufferHandle = GCHandle.Alloc(j.pixels, GCHandleType.Pinned);
                    image.Convert(conversionParams, bufferHandle.AddrOfPinnedObject(), j.pixels.Length);
                    bufferHandle.Free();
                }
                else
                {
                    XRCameraImagePlane plane = image.GetPlane(0);                     // use the Y plane
                    j.pixels   = new byte[plane.data.Length];
                    j.channels = 1;
                    plane.data.CopyTo(j.pixels);
                }

                j.sessionFirstImage = sessionFirstImage;
                if (sessionFirstImage)
                {
                    sessionFirstImage = false;
                }

                jobs.Add(j);
                image.Dispose();

                m_cameraShutterClick.Play();
            }
        }
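The pinned-buffer conversion in the rgbCapture branch above can be isolated into a small reusable helper; a sketch (hypothetical function, assuming the same XRCameraImage API used throughout these examples):

    static byte[] ConvertToBytes(XRCameraImage image, TextureFormat format)
    {
        var p = new XRCameraImageConversionParams
        {
            inputRect        = new RectInt(0, 0, image.width, image.height),
            outputDimensions = new Vector2Int(image.width, image.height),
            outputFormat     = format,
            transformation   = CameraImageTransformation.None
        };
        byte[] bytes = new byte[image.GetConvertedDataSize(p)];
        GCHandle handle = GCHandle.Alloc(bytes, GCHandleType.Pinned);
        try     { image.Convert(p, handle.AddrOfPinnedObject(), bytes.Length); }
        finally { handle.Free(); }   // always unpin, even if Convert throws
        return bytes;
    }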
Example #13
    // Get Image from the AR Camera, extract the raw data from the image
    private unsafe void CaptureARBuffer(ARCameraFrameEventArgs eventArgs)
    {
        // Get the image in the ARSubsystemManager.cameraFrameReceived callback

        XRCameraImage image;

        if (!cameraManager.TryGetLatestImage(out image))
        {
            Debug.LogError("Capture AR Buffer returns nothing!!!!!!");
            return;
        }

        // Format and Y plane, fetched here for inspection/debugging
        CameraImageFormat  imageFormat = image.format;
        XRCameraImagePlane yPlane      = image.GetPlane(0);

        Matrix4x4 projectionMatrix = eventArgs.projectionMatrix.Value;

        Vector3    position1 = new Vector3();
        Quaternion rotation1 = new Quaternion();

        SerializedCameraData serializedCameraData = new SerializedCameraData()
        {
            Timestamp        = eventArgs.timestampNs.Value,
            Position         = position1,
            Rotation         = rotation1,
            ProjectionMatrix = eventArgs.projectionMatrix.Value
        };


        byte[] augmentByteArray = serializedCameraData.Serialize();


        var conversionParams = new XRCameraImageConversionParams
        {
            // Get the full image
            inputRect = new RectInt(0, 0, image.width, image.height),

            // Keep the full resolution (no downsampling)
            outputDimensions = new Vector2Int(image.width, image.height),

            // Color image format
            outputFormat = ConvertFormat,

            // Flip across the x axis
            transformation = CameraImageTransformation.MirrorX
        };
        // See how many bytes we need to store the final image.
        int size = image.GetConvertedDataSize(conversionParams);

        Debug.LogError("OnCameraFrameReceived, size == " + size + "w:" + image.width + " h:" + image.height + " planes=" + image.planeCount);


        // Allocate a buffer to store the image
        var buffer = new NativeArray <byte>(size, Allocator.Temp);

        // Extract the image data
        image.Convert(conversionParams, new System.IntPtr(buffer.GetUnsafePtr()), buffer.Length);

        // The image was converted to RGBA32 format and written into the provided buffer
        // so we can dispose of the CameraImage. We must do this or it will leak resources.

        byte[] bytes = buffer.ToArray();
        monoProxy.StartCoroutine(PushFrame(bytes, image.width, image.height,
                                           () => { image.Dispose(); buffer.Dispose(); }));
    }
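Unity also offers an asynchronous conversion path that avoids blocking the frame callback; a minimal coroutine sketch based on the XRCameraImage.ConvertAsync API (the consume step is illustrative):

    IEnumerator ConvertAsyncExample(XRCameraImage image, XRCameraImageConversionParams conversionParams)
    {
        var request = image.ConvertAsync(conversionParams);
        image.Dispose();   // per the AR Foundation docs, the image may be disposed once the request exists

        while (!request.status.IsDone())
            yield return null;

        if (request.status == AsyncCameraImageConversionStatus.Ready)
        {
            NativeArray<byte> data = request.GetData<byte>();
            // ... consume the converted pixels here ...
        }

        request.Dispose();   // releases the converted buffer
    }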
Example #14
    void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // Camera data extraction
        XRCameraImage image;

        if (!cameraManager.TryGetLatestImage(out image))
        {
            Debug.Log("Uh Oh");
            return;
        }

        Vector2            img_dim   = image.dimensions;
        XRCameraImagePlane greyscale = image.GetPlane(0);

        // Instantiates new m_Texture if necessary
        if (m_Texture == null || m_Texture.width != image.width)
        {
            var format = TextureFormat.RGBA32;
            m_Texture = new Texture2D(image.width, image.height, format, false);
        }

        ThreeStage_AR_Controller ARC = m_ARSessionManager.GetComponent <ThreeStage_AR_Controller>();

        // Process the image here:
        unsafe {
            IntPtr greyPtr = (IntPtr)greyscale.data.GetUnsafePtr();

            // TOUCH: Detect corners and set as source points
            if (Input.touchCount > 0)
            {
                Touch touch = Input.GetTouch(0);
                if (touch.phase == TouchPhase.Began)
                {
                    // Cache original image
                    Utils.copyToMat(greyPtr, cached_initMat);

                    if (!spa_full)   // Stage 1: Finding World Markers
                    {
                        m_ImageInfo.text = string.Format("Number of markers detected: {0} \n world_nulls {1}",
                                                         count_src_nulls(), ARC.count_world_nulls());
                        ArucoDetection();

                        ARC.SetWorldPoints();
                        ARC.SetScreenPoints();
                        DrawScreenPoints(ARC);
                    }
                    else   // Stage 2: Rectification of Captured Image Faces
                    {
                        m_ImageInfo.text = String.Format("world_nulls: {0}", ARC.count_world_nulls());
                        ARC.SetScreenPoints();
                        DrawScreenPoints(ARC);

                        proj_point_array = ARC.GetScreenpoints();

                        GetFaces(ref proj_point_array);
                        ShowFaces(img_dim);
                    }

                    Core.flip(cached_initMat, outMat, 0);
                }
            }

            // Displays OpenCV Mat as a Texture
            Utils.matToTexture2D(outMat, m_Texture, false, 0);
        }

        // Dispose only after the plane data has been consumed.
        image.Dispose();

        if (spa_full)   // Stage 3: Real-time warping
        {
            ARC.SetScreenPoints();
            proj_point_array = ARC.GetScreenpoints();

            for (int i = 0; i < 3; i++)
            {
                m_ImageInfo.text = String.Format("Stage 3: {0}", i);
                HomographyTransform(i);
            }

            CombineWarped();

            Utils.matToTexture2D(outMat, m_Texture, false, 0);
        }

        // Sets orientation of screen if necessary
        if (m_CachedOrientation == null || m_CachedOrientation != Screen.orientation)
        {
            // TODO: Debug why doesn't initiate with ConfigRawimage(). The null isn't triggering here. Print cached Orientation
            m_CachedOrientation = Screen.orientation;
            ConfigureRawImageInSpace(img_dim);
        }

        m_RawImage.texture = (Texture)m_Texture;

        // m_ImageInfo.text = string.Format("Number of Blobs: {0}", ids.size());
    }
Example #15
    void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // Camera data extraction
        XRCameraImage image;

        if (!cameraManager.TryGetLatestImage(out image))
        {
            Debug.Log("Uh Oh");
            return;
        }

        Vector2            img_dim   = image.dimensions;
        XRCameraImagePlane greyscale = image.GetPlane(0);

        // Instantiates new m_Texture if necessary
        if (m_Texture == null || m_Texture.width != image.width)
        {
            var format = TextureFormat.RGBA32;
            m_Texture = new Texture2D(image.width, image.height, format, false);
        }

        // Process the image here:
        unsafe {
            IntPtr greyPtr = (IntPtr)greyscale.data.GetUnsafePtr();

            // TOUCH: Detect corners and set as source points
            if (Input.touchCount > 0)
            {
                Touch touch = Input.GetTouch(0);
                if (touch.phase == TouchPhase.Began)
                {
                    // Cache original image
                    Utils.copyToMat(greyPtr, cached_initMat);

                    // Detect reference points
                    BlobDetection();

                    Rectify(ref face1Array, ref cached_homoMat1);
                    Rectify(ref face2Array, ref cached_homoMat2);
                    Rectify(ref face3Array, ref cached_homoMat3);

                    // Display cached top-down
                    Texture2D topTexture1 = new Texture2D((int)img_dim.x, (int)img_dim.y, TextureFormat.RGBA32, false);
                    Utils.matToTexture2D(cached_homoMat1, topTexture1, false, 0);
                    m_TopImage1.texture = (Texture)topTexture1;

                    Texture2D topTexture2 = new Texture2D((int)img_dim.x, (int)img_dim.y, TextureFormat.RGBA32, false);
                    Utils.matToTexture2D(cached_homoMat2, topTexture2, false, 0);
                    m_TopImage2.texture = (Texture)topTexture2;

                    Texture2D topTexture3 = new Texture2D((int)img_dim.x, (int)img_dim.y, TextureFormat.RGBA32, false);
                    Utils.matToTexture2D(cached_homoMat3, topTexture3, false, 0);
                    m_TopImage3.texture = (Texture)topTexture3;

                    Debug.Log("OCFR: 510");
                }
            }

            // Warps cached top-down and gets outMat.
            // HomographyTransform(ref cached_homoMat);

            // Displays OpenCV Mat as a Texture
            Utils.matToTexture2D(outMat, m_Texture, false, 0);
        }

        // Dispose only after the plane data has been consumed.
        image.Dispose();

        // Sets orientation of screen if necessary
        if (m_CachedOrientation == null || m_CachedOrientation != Screen.orientation)
        {
            // TODO: Debug why doesn't initiate with ConfigRawimage(). The null isn't triggering here. Print cached Orientation
            m_CachedOrientation = Screen.orientation;
            ConfigureRawImageInSpace(img_dim);
        }

        // Debug.Log("OCFR: 529");

        m_RawImage.texture = (Texture)m_Texture;

        m_ImageInfo.text = string.Format("Number of Blobs: {0}", keyMat.rows());
    }
Example #18
    void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // Camera data extraction
        XRCameraImage image;

        if (!cameraManager.TryGetLatestImage(out image))
        {
            Debug.Log("Uh Oh");
            return;
        }

        Vector2            img_dim   = image.dimensions;
        XRCameraImagePlane greyscale = image.GetPlane(0);

        // Instantiates new m_Texture if necessary
        if (m_Texture == null || m_Texture.width != image.width)
        {
            var format = TextureFormat.RGBA32;
            m_Texture = new Texture2D(image.width, image.height, format, false);
        }

        // Process the image here:
        unsafe {
            IntPtr greyPtr = (IntPtr)greyscale.data.GetUnsafePtr();

            // TOUCH: Detect corners and set as source points
            if (Input.touchCount > 0)
            {
                Touch touch = Input.GetTouch(0);
                if (touch.phase == TouchPhase.Began)
                {
                    // Cache original image
                    Utils.copyToMat(greyPtr, cached_initMat);

                    Debug.Log("OFCR: ArucoDetection()");
                    // Detect reference points
                    ArucoDetection();

                    Debug.Log("OFCR: GetFaces()");
                    // Get Rectified Textures
                    GetFaces();
                    ShowFaces(img_dim);
                }
            }

            // Displays OpenCV Mat as a Texture
            Utils.matToTexture2D(outMat, m_Texture, false, 0);
        }

        // Dispose only after the plane data has been consumed.
        image.Dispose();

        if (spa_full)
        {
            // All source points found; homography warping happens in later stages
            // Debug.Log(" ALL SRC FOUND");
        }

        // Sets orientation of screen if necessary
        if (m_CachedOrientation == null || m_CachedOrientation != Screen.orientation)
        {
            // TODO: Debug why doesn't initiate with ConfigRawimage(). The null isn't triggering here. Print cached Orientation
            m_CachedOrientation = Screen.orientation;
            ConfigureRawImageInSpace(img_dim);
        }

        m_RawImage.texture = (Texture)m_Texture;

        // m_ImageInfo.text = string.Format("Number of Blobs: {0}", ids.size());
    }
Example #19
    void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // Camera data extraction
        XRCameraImage image;

        if (!cameraManager.TryGetLatestImage(out image))
        {
            Debug.Log("Uh Oh");
            return;
        }

        Vector2            img_dim   = image.dimensions;
        XRCameraImagePlane greyscale = image.GetPlane(0);

        // Instantiates new m_Texture if necessary
        if (m_Texture == null || m_Texture.width != image.width)
        {
            var format = TextureFormat.RGBA32;
            m_Texture = new Texture2D(image.width, image.height, format, false);
        }

        ThreeStage_AR_Controller ARC = m_ARSessionManager.GetComponent <ThreeStage_AR_Controller>();

        // Process the image here:
        unsafe {
            IntPtr greyPtr = (IntPtr)greyscale.data.GetUnsafePtr();

            // TOUCH: Detect corners and set as source points
            if (Input.touchCount > 0)
            {
                Touch touch = Input.GetTouch(0);
                if (touch.phase == TouchPhase.Began)
                {
                    // Cache original image
                    Utils.copyToMat(greyPtr, cached_initMat);

                    // if (!spa_full) { // Stage 1: Finding World Markers
                    if (touch.position.x < image.width / 2) // Stage 1: Finding World Markers
                    // Detect the markers (in c1 space)
                    {
                        ArucoDetection();

                        // Raycast and get World points
                        ARC.SetWorldPoints();

                        // (For Testing) Extract c2 points and draw onto output.
                        ARC.SetScreenPoints();
                        DrawScreenPoints(ARC);
                    }
                    else   // Stage 2: Rectification of Captured Image Faces
                           // Extract c2 points and draw onto output
                    {
                        ARC.SetScreenPoints();
                        DrawScreenPoints(ARC);

                        // Caching the c2 world position
                        ARC.CacheCamPoints();

                        // Getting dest points
                        proj_point_array = ARC.GetScreenpoints();

                        // Rectify Faces and Display them
                        GetFaces(ref proj_point_array);
                        ShowFaces(img_dim);
                    }

                    Core.flip(cached_initMat, outMat, 0);
                }
            }

            // Displays OpenCV Mat as a Texture
            Utils.matToTexture2D(outMat, m_Texture, false, 0);
        }

        // Dispose only after the plane data has been consumed.
        image.Dispose();

        if (spa_full)   // Stage 3: Real-time warping
        // Get c2 screenpoints
        {
            ARC.SetScreenPoints();
            proj_point_array = ARC.GetScreenpoints();

            // Get the closest camera position
            int closest_capture = ARC.GetClosestIndex();

            // Warp rectified closest capture Mats for each face dependent on current position
            for (int i = 0; i < 3; i++)
            {
                m_ImageInfo.text = String.Format("Stage 3: {0}", i);
                HomographyTransform(i, closest_capture);
            }

            m_ImageInfo.text = String.Format("closest_capture : {0}", closest_capture);

            // Combined the warped images into one image
            CombineWarped();

            // Display the combined image
            Utils.matToTexture2D(outMat, m_Texture, false, 0);
        }

        // Sets orientation of screen if necessary
        if (m_CachedOrientation == null || m_CachedOrientation != Screen.orientation)
        {
            // TODO: Debug why doesn't initiate with ConfigRawimage(). The null isn't triggering here. Print cached Orientation
            m_CachedOrientation = Screen.orientation;
            ConfigureRawImageInSpace(img_dim);
        }

        m_RawImage.texture = (Texture)m_Texture;

        // m_ImageInfo.text = string.Format("Number of Blobs: {0}", ids.size());
    }