Code Example #1
    void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // Camera data extraction
        XRCameraImage image;

        if (!cameraManager.TryGetLatestImage(out image))
        {
            Debug.Log("Uh Oh");
            return;
        }

        Vector2            img_dim   = image.dimensions;
        XRCameraImagePlane greyscale = image.GetPlane(0);

        // Instantiates new m_Texture if necessary
        if (m_Texture == null || m_Texture.width != image.width)
        {
            var format = TextureFormat.RGBA32;
            m_Texture = new Texture2D(image.width, image.height, format, false);
        }

        // Process the image here:
        unsafe {
            IntPtr greyPtr = (IntPtr)greyscale.data.GetUnsafePtr();

            // TOUCH: Detect corners and set as source points
            if (Input.touchCount > 0)
            {
                Touch touch = Input.GetTouch(0);
                if (touch.phase == TouchPhase.Began)
                {
                    // Cache original image
                    Utils.copyToMat(greyPtr, cached_initMat);

                    // Detect reference points
                    BlobDetection();

                    // Display cached top-down
                    Texture2D topTexture = new Texture2D((int)img_dim.x, (int)img_dim.y, TextureFormat.RGBA32, false);
                    Utils.matToTexture2D(cached_homoMat, topTexture, false, 0);
                    m_TopImage.texture = (Texture)topTexture;

                    // DEBUG: Display detected reference points (SCR)
                    // trackScreenPoints();
                }
            }

            // Warps cached top-down and gets outMat.
            HomographyTransform(greyPtr);
            // outMat = cached_homoMat;

            // Displays OpenCV Mat as a Texture
            Utils.matToTexture2D(outMat, m_Texture, false, 0);
        }

        // Dispose the image only after its plane data is no longer needed;
        // the buffer returned by GetPlane(0) is invalid once the image is disposed.
        image.Dispose();

        // Sets orientation of screen if necessary
        if (m_CachedOrientation == null || m_CachedOrientation != Screen.orientation)
        {
            // TODO: Debug why doesn't initiate with ConfigRawimage(). The null isn't triggering here. Print cached Orientation
            m_CachedOrientation = Screen.orientation;
            ConfigureRawImageInSpace(img_dim);
        }

        m_RawImage.texture = (Texture)m_Texture;

        m_ImageInfo.text = string.Format("Number of Blobs: {0}", keyMat.rows());
    }
Code Example #2
    void FrameChanged(ARCameraFrameEventArgs args)
    {
        if (args.lightEstimation.averageBrightness.HasValue)
        {
            brightness = args.lightEstimation.averageBrightness.Value;
            if (brightness.Value > 0.5)
            {
                m_Light.intensity = brightness.Value * 2;
            }
            else
            {
                m_Light.intensity = brightness.Value;
            }
            Debug.Log("Brightness " + brightness);
        }

        if (args.lightEstimation.averageColorTemperature.HasValue)
        {
            colorTemperature         = args.lightEstimation.averageColorTemperature.Value;
            m_Light.colorTemperature = colorTemperature.Value;
        }

        if (args.lightEstimation.colorCorrection.HasValue)
        {
            colorCorrection = args.lightEstimation.colorCorrection.Value;
            m_Light.color   = colorCorrection.Value;
        }

        if (args.lightEstimation.mainLightDirection.HasValue)
        {
            mainLightDirection         = args.lightEstimation.mainLightDirection;
            m_Light.transform.rotation = Quaternion.LookRotation(mainLightDirection.Value);
        }

        if (args.lightEstimation.mainLightColor.HasValue)
        {
            mainLightColor = args.lightEstimation.mainLightColor;

#if PLATFORM_ANDROID
            // ARCore needs to apply energy conservation term (1 / PI) and be placed in gamma
            m_Light.color = mainLightColor.Value / Mathf.PI;
            m_Light.color = m_Light.color.gamma;

            // ARCore returns color in HDR format (can be represented as FP16 and have values above 1.0)
            var camera = m_CameraManager.GetComponentInParent <Camera>();
            if (camera == null || !camera.allowHDR)
            {
                Debug.LogWarning($"HDR Rendering is not allowed.  Color values returned could be above the maximum representable value.");
            }
#endif
        }

        if (args.lightEstimation.mainLightIntensityLumens.HasValue)
        {
            mainLightIntensityLumens = args.lightEstimation.mainLightIntensityLumens;
            m_Light.intensity        = args.lightEstimation.averageMainLightBrightness.Value;
        }

        if (args.lightEstimation.ambientSphericalHarmonics.HasValue)
        {
            sphericalHarmonics          = args.lightEstimation.ambientSphericalHarmonics;
            RenderSettings.ambientMode  = AmbientMode.Skybox;
            RenderSettings.ambientProbe = sphericalHarmonics.Value;
        }
    }
Code Example #3
    unsafe void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        //Get the latest image
        XRCameraImage image;

        if (!cameraManager.TryGetLatestImage(out image))
        {
            return;
        }

        timeCount += Time.deltaTime;

        //select the format of the texture
        var format = TextureFormat.RGBA32;

        //check if the texture changed, and only if so create a new one with the new changes
        if (texture == null || texture.width != image.width || texture.height != image.height)
        {
            texture = new Texture2D(image.width, image.height, format, false);
        }

        // Mirror on the Y axis so that it matches OpenCV conventions
        var conversionParams = new XRCameraImageConversionParams(image, format, CameraImageTransformation.MirrorY);

        // try to apply raw texture data to the texture
        var rawTextureData = texture.GetRawTextureData <byte>();

        try
        {
            image.Convert(conversionParams, new IntPtr(rawTextureData.GetUnsafePtr()), rawTextureData.Length);
        }
        finally
        {
            // The XRCameraImage must always be disposed to release its native buffer.
            image.Dispose();
        }

        //apply texture
        texture.Apply();

        texParam.FlipHorizontally = false;

        //create a Mat class from the texture
        imgBuffer = ARucoUnityHelper.TextureToMat(texture, texParam);

        // Increment thread counter
        if (threadCounter == 0 && timeCount >= markerDetectorPauseTime &&
            arCamera.velocity.magnitude <= maxPositionChangePerFrame && cameraPoseTracker.rotationChange <= maxRotationChangePerFrameDegrees)
        {
            //copy the buffer data to the img Mat
            imgBuffer.CopyTo(img);
            Interlocked.Increment(ref threadCounter);
            timeCount = 0;
        }

        updateThread = true;

        //Show the texture if needed
        if (showOpenCvTexture)
        {
            openCvTexture.texture = ARucoUnityHelper.MatToTexture(imgBuffer, texture);
        }

        //release imgBuffer Mat
        imgBuffer.Release();
    }
Code Example #4
    unsafe void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        if (m_DoObjectRecognition)
        {
            m_DoObjectRecognition = false;
            // Attempt to get the latest camera image. If this method succeeds,
            // it acquires a native resource that must be disposed (see below).
            XRCameraImage image;
            if (!cameraManager.TryGetLatestImage(out image))
            {
                return;
            }

            Console.WriteLine(string.Format(
                                  "Image info:\n\twidth: {0}\n\theight: {1}\n\tplaneCount: {2}\n\ttimestamp: {3}\n\tformat: {4}",
                                  image.width, image.height, image.planeCount, image.timestamp, image.format));

            // Once we have a valid XRCameraImage, we can access the individual image "planes"
            // (the separate channels in the image). XRCameraImage.GetPlane provides
            // low-overhead access to this data. This could then be passed to a
            // computer vision algorithm. Here, we will convert the camera image
            // to an RGBA texture and draw it on the screen.

            // Choose an RGB color format.
            // See XRCameraImage.FormatSupported for a complete list of supported formats.
            var format       = TextureFormat.RGB24;
            var targetWidth  = image.width / 4;
            var targetHeight = image.height / 4;

            if (m_Texture == null || m_Texture.width != targetWidth || m_Texture.height != targetHeight)
            {
                m_Texture = new Texture2D(targetWidth, targetHeight, format, false);
            }

            // Convert the image to the chosen format with no mirroring, downsampled to a quarter of the original size.
            // We can also get a sub rectangle, but we'll get the full image here.
            var conversionParams = new XRCameraImageConversionParams(image, format, CameraImageTransformation.None);
            conversionParams.outputDimensions = new Vector2Int(targetWidth, targetHeight);

            // Texture2D allows us to write directly to the raw texture data.
            // This allows us to do the conversion in-place without making any copies.
            var rawTextureData = m_Texture.GetRawTextureData <byte>();
            try
            {
                image.Convert(conversionParams, new IntPtr(rawTextureData.GetUnsafePtr()), rawTextureData.Length);
            }
            finally
            {
                // We must dispose of the XRCameraImage after we're finished
                // with it to avoid leaking native resources.
                image.Dispose();
            }

            // Apply the updated texture data to our texture
            m_Texture.Apply();

            // Set the RawImage's texture so we can visualize it.
            m_RawImage.texture = m_Texture;

            StartCoroutine(HandleObjectRecognitionBaidu());
        }
    }
Code Example #5
 void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
 {
     UpdateCameraImage();
 }
Code Example #6
 public void Process(ARCameraFrameEventArgs a)
 {
     // displayMatrix is nullable; only cache it when the frame actually provides one.
     if (a.displayMatrix.HasValue)
     {
         lastDisplayMatrix = a.displayMatrix.Value;
     }
 }
Code Example #7
 private void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
 {
     RefreshCameraFeedTexture();
     DisplayInfo();
 }
Code Example #8
    unsafe void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // Attempt to get the latest camera image. If this method succeeds,
        // it acquires a native resource that must be disposed (see below).
        CameraImage image;

        if (!ARSubsystemManager.cameraSubsystem.TryGetLatestImage(out image))
        {
            return;
        }

        // Display some information about the camera image
        m_ImageInfo.text = string.Format(
            "Image info:\n\twidth: {0}\n\theight: {1}\n\tplaneCount: {2}\n\ttimestamp: {3}\n\tformat: {4}",
            image.width, image.height, image.planeCount, image.timestamp, image.format);

        // Once we have a valid CameraImage, we can access the individual image "planes"
        // (the separate channels in the image). CameraImage.GetPlane provides
        // low-overhead access to this data. This could then be passed to a
        // computer vision algorithm. Here, we will convert the camera image
        // to an RGBA texture and draw it on the screen.

        // Choose an RGBA format.
        // See CameraImage.FormatSupported for a complete list of supported formats.
        var format = TextureFormat.RGBA32;

        if (m_Texture == null)
        {
            m_Texture = new Texture2D(image.width, image.height, format, false);
        }

        // Convert the image to format, flipping the image across the Y axis.
        // We can also get a sub rectangle, but we'll get the full image here.
        var conversionParams = new CameraImageConversionParams(image, format, CameraImageTransformation.MirrorY);

#if UNITY_2018_2_OR_NEWER
        // In 2018.2+, Texture2D allows us to write directly to the raw texture data.
        // This allows us to do the conversion in-place without making any copies.
        var rawTextureData = m_Texture.GetRawTextureData <byte>();
        image.Convert(conversionParams, new IntPtr(rawTextureData.GetUnsafePtr()), rawTextureData.Length);
#else
        // In 2018.1, Texture2D didn't have this feature, so we'll create
        // a temporary buffer and perform the conversion using that data.
        int size           = image.GetConvertedDataSize(conversionParams);
        var rawTextureData = new NativeArray <byte>(size, Allocator.Temp);
        var ptr            = new IntPtr(rawTextureData.GetUnsafePtr());
        image.Convert(conversionParams, ptr, rawTextureData.Length);
        m_Texture.LoadRawTextureData(ptr, rawTextureData.Length);
        rawTextureData.Dispose();
#endif

        // We must dispose of the CameraImage after we're finished
        // with it to avoid leaking native resources.
        image.Dispose();

        // Apply the updated texture data to our texture
        m_Texture.Apply();

        // Set the RawImage's texture so we can visualize it.
        m_RawImage.texture = m_Texture;
    }
Code Example #9
 void ChangeLighting(ARCameraFrameEventArgs args)
 {
     // TODO: Add the code for your environmental lighting adjustments here.
 }
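
The ChangeLighting handler above is only a stub. As a minimal sketch (not part of the original project), its body could follow the nullable-guard pattern already shown in Code Examples #2 and #18 on this page, assuming the component exposes a Light field named m_Light:

 void ChangeLighting(ARCameraFrameEventArgs args)
 {
     // m_Light is an assumed Light reference; it does not exist in the original snippet.
     if (args.lightEstimation.averageBrightness.HasValue)
     {
         m_Light.intensity = args.lightEstimation.averageBrightness.Value;
     }

     if (args.lightEstimation.averageColorTemperature.HasValue)
     {
         m_Light.colorTemperature = args.lightEstimation.averageColorTemperature.Value;
     }

     if (args.lightEstimation.colorCorrection.HasValue)
     {
         m_Light.color = args.lightEstimation.colorCorrection.Value;
     }
 }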
Code Example #10
        public void SendFrameReceived(IConnectionProvider connectionProvider, ARCameraFrameEventArgs frameEventArgs)
        {
            if (ARSubsystemManager.cameraSubsystem == null)
            {
                return;
            }

            writer.BeginMessage(RemoteMessage.ARCameraFrameData);

            writer.Write(frameEventArgs.lightEstimation.averageBrightness.GetValueOrDefault());
            writer.Write(frameEventArgs.lightEstimation.averageColorTemperature.GetValueOrDefault());
            writer.Write(frameEventArgs.time.GetValueOrDefault());

            var gotDisplayMatrix = ARSubsystemManager.cameraSubsystem.TryGetDisplayMatrix(ref dm);

            writer.Write(gotDisplayMatrix);

            if (gotDisplayMatrix)
            {
                for (int i = 0; i < 16; i++)
                {
                    writer.Write(dm[i]);
                }
            }

            Matrix4x4 pm = Matrix4x4.identity;
            var       gotProjectionMatrix = ARSubsystemManager.cameraSubsystem.TryGetProjectionMatrix(ref pm);

            writer.Write(gotProjectionMatrix);

            if (gotProjectionMatrix)
            {
                for (int i = 0; i < 16; i++)
                {
                    writer.Write(pm[i]);
                }
            }


            byte fieldMask = 0;

            if (frameEventArgs.time.HasValue)
            {
                fieldMask |= 1 << 0;
            }
            if (frameEventArgs.lightEstimation.averageBrightness.HasValue)
            {
                fieldMask |= 1 << 1;
            }
            if (frameEventArgs.lightEstimation.averageColorTemperature.HasValue)
            {
                fieldMask |= 1 << 2;
            }
            if (gotProjectionMatrix)
            {
                fieldMask |= 1 << 3;
            }
            if (gotDisplayMatrix)
            {
                fieldMask |= 1 << 4;
            }

            writer.Write(fieldMask);

            writer.EndMessage(stream);

            if (connectionProvider != null)
            {
                connectionProvider.SendMessage(stream);
            }
        }
Code Example #11
        private void OnFrameUpdate(ARCameraFrameEventArgs frameEventArgs)
        {
            if (!ShouldUpdateARCoreSession())
            {
                return;
            }

            if (_sessionHandle == IntPtr.Zero)
            {
                return;
            }

            if (_frameHandle != IntPtr.Zero)
            {
                SessionApi.ReleaseFrame(_frameHandle);
                _frameHandle = IntPtr.Zero;
            }

            if (_arKitSession != null && _cameraManager != null && _arKitSession.enabled)
            {
                var cameraParams = new XRCameraParams
                {
                    zNear             = _cameraManager.GetComponent <Camera>().nearClipPlane,
                    zFar              = _cameraManager.GetComponent <Camera>().farClipPlane,
                    screenWidth       = Screen.width,
                    screenHeight      = Screen.height,
                    screenOrientation = Screen.orientation
                };

                if (!_cameraManager.subsystem.TryGetLatestFrame(
                        cameraParams, out XRCameraFrame frame))
                {
                    Debug.LogWarning("XRCamera's latest frame is not available now.");
                    return;
                }

                if (frame.timestampNs == 0 || frame.FrameHandle() == IntPtr.Zero)
                {
                    Debug.LogWarning("ARKit Plugin Frame is not ready.");
                    return;
                }

                var status = ExternApi.ArSession_updateAndAcquireArFrame(
                    _sessionHandle, frame.FrameHandle(), ref _frameHandle);
                if (status != ApiArStatus.Success)
                {
                    Debug.LogErrorFormat("Failed to update and acquire ARFrame with error: " +
                                         "{0}", status);
                    return;
                }

                if (_cachedCloudAnchorMode !=
                    ARCoreExtensions._instance.ARCoreExtensionsConfig.CloudAnchorMode)
                {
                    ApiCloudAnchorMode mode = (ApiCloudAnchorMode)
                                              ARCoreExtensions._instance.ARCoreExtensionsConfig.CloudAnchorMode;
                    status = ExternApi.ArSession_setCloudAnchorMode(_sessionHandle, mode);
                    if (status != ApiArStatus.Success)
                    {
                        Debug.LogErrorFormat(
                            "Could not set CloudAnchorMode {0} ({1}).", mode, status);
                        return;
                    }
                    else
                    {
                        Debug.LogFormat("Set Cloud Anchor Mode to {0}.", mode);
                    }

                    _cachedCloudAnchorMode =
                        ARCoreExtensions._instance.ARCoreExtensionsConfig.CloudAnchorMode;
                }
            }
        }
Code Example #12
File: HandVRMain.cs Project: NON906/HandMR
        void onCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
        {
            if (isStart_)
            {
                if (nextUpdateFrameTime_ > Time.realtimeSinceStartup)
                {
                    return;
                }
                //nextUpdateFrameTime_ += INTERVAL_TIME;
                nextUpdateFrameTime_ = Time.realtimeSinceStartup + INTERVAL_TIME;
            }

            if (!cameraManager_.TryAcquireLatestCpuImage(out XRCpuImage image))
            {
                return;
            }

            if (!isStart_)
            {
                height_ = RESIZE_HEIGHT;
                if (image.height < height_)
                {
                    height_ = image.height;
                }

                width_ = height_ * image.width / image.height;

#if UNITY_ANDROID && !UNITY_EDITOR
                using (AndroidJavaClass unityPlayer = new AndroidJavaClass("com.unity3d.player.UnityPlayer"))
                    using (AndroidJavaObject currentUnityActivity = unityPlayer.GetStatic <AndroidJavaObject>("currentActivity"))
                    {
                        multiHandMain_ = new AndroidJavaObject("online.mumeigames.mediapipe.apps.multihandtrackinggpu.MultiHandMain", currentUnityActivity, width_, height_, BothHand ? 2 : 1, 0);
                    }

                multiHandMain_.Call("startRunningGraph");
                nextUpdateFrameTime_ = Time.realtimeSinceStartup + START_DELAY_TIME;
#endif

#if UNITY_IOS && !UNITY_EDITOR
                IntPtr graphName = Marshal.StringToHGlobalAnsi("handcputogpu");
                multiHandSetup(graphName, width_, height_, BothHand ? 2 : 1, 0);
                Marshal.FreeHGlobal(graphName);

                multiHandStartRunningGraph();
                nextUpdateFrameTime_ = Time.realtimeSinceStartup + START_DELAY_TIME;
#endif

                focalLength_ = 0.5 / Mathf.Tan(FixedFieldOfView * Mathf.Deg2Rad * 0.5f);
                hand3dInitWithValues(focalLength_, focalLength_, 0.5, 0.5);

                resetHandValues();

                isStart_ = true;

                image.Dispose();
                return;
            }

            var conversionParams = new XRCpuImage.ConversionParams
            {
                inputRect        = new RectInt(0, 0, image.width, image.height),
                outputDimensions = new Vector2Int(width_, height_),
#if UNITY_IOS && !UNITY_EDITOR
                outputFormat = TextureFormat.BGRA32,
#else
                outputFormat = TextureFormat.ARGB32,
#endif
                transformation = XRCpuImage.Transformation.None
            };

            unsafe
            {
                int size   = image.GetConvertedDataSize(conversionParams);
                var buffer = new NativeArray <byte>(size, Allocator.Temp);
                image.Convert(conversionParams, new IntPtr(buffer.GetUnsafePtr()), buffer.Length);
                image.Dispose();

                byte[] frameImage = new byte[size];
                buffer.CopyTo(frameImage);

#if UNITY_ANDROID && !UNITY_EDITOR
                sbyte[] frameImageSigned = Array.ConvertAll(frameImage, b => unchecked ((sbyte)b));
                multiHandMain_.Call("setFrame", frameImageSigned);
#endif

#if UNITY_IOS && !UNITY_EDITOR
                IntPtr frameIntPtr = Marshal.AllocHGlobal(frameImage.Length * Marshal.SizeOf <byte>());
                Marshal.Copy(frameImage, 0, frameIntPtr, frameImage.Length);
                multiHandSetFrame(frameIntPtr, frameImage.Length);
                Marshal.FreeHGlobal(frameIntPtr);
#endif

                buffer.Dispose();
            }
        }
Code Example #13
    // Get Image from the AR Camera, extract the raw data from the image
    private unsafe void CaptureARBuffer(ARCameraFrameEventArgs eventArgs)
    {
        // Get the image in the ARSubsystemManager.cameraFrameReceived callback

        XRCameraImage image;

        if (!cameraManager.TryGetLatestImage(out image))
        {
            Debug.LogError("Capture AR Buffer returns nothing!!!!!!");
            return;
        }

        CameraImageFormat  ddd1 = image.format;
        XRCameraImagePlane ss   = image.GetPlane(0);



        Matrix4x4 ddd2 = eventArgs.projectionMatrix.Value;

        Vector3    position1 = new Vector3();
        Quaternion rotation1 = new Quaternion();

        SerializedCameraData serializedCameraData = new SerializedCameraData()
        {
            Timestamp        = eventArgs.timestampNs.Value,
            Position         = position1,
            Rotation         = rotation1,
            ProjectionMatrix = eventArgs.projectionMatrix.Value
        };


        byte[] augmentByteArray = serializedCameraData.Serialize();

        //Matrix4x4 ddd22 = eventArgs.projectionMatrix;



        var conversionParams = new XRCameraImageConversionParams
        {
            // Get the full image
            inputRect = new RectInt(0, 0, image.width, image.height),

            // Keep the full resolution (no downsampling)
            outputDimensions = new Vector2Int(image.width, image.height),

            // Color image format
            outputFormat = ConvertFormat,

            // Mirror across the X axis
            transformation = CameraImageTransformation.MirrorX
        };
        // See how many bytes we need to store the final image.
        int size = image.GetConvertedDataSize(conversionParams);

        Debug.LogError("OnCameraFrameReceived, size == " + size + "w:" + image.width + " h:" + image.height + " planes=" + image.planeCount);


        // Allocate a buffer to store the image
        var buffer = new NativeArray <byte>(size, Allocator.Temp);

        // Extract the image data
        image.Convert(conversionParams, new System.IntPtr(buffer.GetUnsafePtr()), buffer.Length);

        // The image data has been written into the provided buffer in the requested format,
        // so the XRCameraImage can be disposed once the frame has been pushed (see the callback below).

        byte[] bytes = buffer.ToArray();
        monoProxy.StartCoroutine(PushFrame(bytes, image.width, image.height,
                                           () => { image.Dispose(); buffer.Dispose(); }));
    }
Code Example #14
 private void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
 {
     Debug.LogError("OnCameraFrameReceived");
     CaptureARBuffer(eventArgs);
 }
Code Example #15
    void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // Camera data extraction
        XRCameraImage image;

        if (!cameraManager.TryGetLatestImage(out image))
        {
            Debug.Log("Uh Oh");
            return;
        }

        Vector2            img_dim   = image.dimensions;
        XRCameraImagePlane greyscale = image.GetPlane(0);

        // Instantiates new m_Texture if necessary
        if (m_Texture == null || m_Texture.width != image.width)
        {
            var format = TextureFormat.RGBA32;
            m_Texture = new Texture2D(image.width, image.height, format, false);
        }

        ThreeStage_AR_Controller ARC = m_ARSessionManager.GetComponent <ThreeStage_AR_Controller>();

        // Process the image here:
        unsafe {
            IntPtr greyPtr = (IntPtr)greyscale.data.GetUnsafePtr();

            // TOUCH: Detect corners and set as source points
            if (Input.touchCount > 0)
            {
                Touch touch = Input.GetTouch(0);
                if (touch.phase == TouchPhase.Began)
                {
                    // Cache original image
                    Utils.copyToMat(greyPtr, cached_initMat);

                    // if (!spa_full) { // Stage 1: Finding World Markers
                    if (touch.position.x < image.width / 2) // Stage 1: Finding World Markers
                    // Detect the markers (in c1 space)
                    {
                        ArucoDetection();

                        // Raycast and get World points
                        ARC.SetWorldPoints();

                        // (For Testing) Extract c2 points and draw onto output.
                        ARC.SetScreenPoints();
                        DrawScreenPoints(ARC);
                    }
                    else   // Stage 2: Rectification of Captured Image Faces
                           // Extract c2 points and draw onto output
                    {
                        ARC.SetScreenPoints();
                        DrawScreenPoints(ARC);

                        // Caching the c2 world position
                        ARC.CacheCamPoints();

                        // Getting dest points
                        proj_point_array = ARC.GetScreenpoints();

                        // Rectify Faces and Display them
                        GetFaces(ref proj_point_array);
                        ShowFaces(img_dim);
                    }

                    Core.flip(cached_initMat, outMat, 0);
                }
            }

            // Displays OpenCV Mat as a Texture
            Utils.matToTexture2D(outMat, m_Texture, false, 0);
        }

        // Dispose the image only after its plane data is no longer needed;
        // the buffer returned by GetPlane(0) is invalid once the image is disposed.
        image.Dispose();

        if (spa_full)   // Stage 3: Real-time warping
        // Get c2 screenpoints
        {
            ARC.SetScreenPoints();
            proj_point_array = ARC.GetScreenpoints();

            // Get the closest camera position
            int closest_capture = ARC.GetClosestIndex();

            // Warp rectified closest capture Mats for each face dependent on current position
            for (int i = 0; i < 3; i++)
            {
                m_ImageInfo.text = String.Format("Stage 3: {0}", i);
                HomographyTransform(i, closest_capture);
            }

            m_ImageInfo.text = String.Format("closest_capture : {0}", closest_capture);

            // Combined the warped images into one image
            CombineWarped();

            // Display the combined image
            Utils.matToTexture2D(outMat, m_Texture, false, 0);
        }

        // Sets orientation of screen if necessary
        if (m_CachedOrientation == null || m_CachedOrientation != Screen.orientation)
        {
            // TODO: Debug why doesn't initiate with ConfigRawimage(). The null isn't triggering here. Print cached Orientation
            m_CachedOrientation = Screen.orientation;
            ConfigureRawImageInSpace(img_dim);
        }

        m_RawImage.texture = (Texture)m_Texture;

        // m_ImageInfo.text = string.Format("Number of Blobs: {0}", ids.size());
    }
Code Example #16
 void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
 {
     UpdateCameraImage();
     UpdateEnvironmentDepthImage();
     UpdateEnvironmentConfidenceImage();
 }
Code Example #17
 void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
 {
     SetupCameraIfNecessary();
 }
Code Example #18
        void FrameChanged(ARCameraFrameEventArgs args)
        {
            if (args.lightEstimation.averageBrightness.HasValue)
            {
                brightness        = args.lightEstimation.averageBrightness.Value;
                m_Light.intensity = brightness.Value;
            }
            else
            {
                brightness = null;
            }

            if (args.lightEstimation.averageColorTemperature.HasValue)
            {
                colorTemperature         = args.lightEstimation.averageColorTemperature.Value;
                m_Light.colorTemperature = colorTemperature.Value;
            }
            else
            {
                colorTemperature = null;
            }

            if (args.lightEstimation.colorCorrection.HasValue)
            {
                colorCorrection = args.lightEstimation.colorCorrection.Value;
                m_Light.color   = colorCorrection.Value;
            }
            else
            {
                colorCorrection = null;
            }

            if (args.lightEstimation.mainLightDirection.HasValue)
            {
                mainLightDirection         = args.lightEstimation.mainLightDirection;
                m_Light.transform.rotation = Quaternion.LookRotation(mainLightDirection.Value);
            }

            if (args.lightEstimation.mainLightColor.HasValue)
            {
                mainLightColor = args.lightEstimation.mainLightColor;
                m_Light.color  = mainLightColor.Value;
            }
            else
            {
                mainLightColor = null;
            }

            if (args.lightEstimation.mainLightIntensityLumens.HasValue)
            {
                mainLightIntensityLumens = args.lightEstimation.mainLightIntensityLumens;
                m_Light.intensity        = args.lightEstimation.averageMainLightBrightness.Value;
            }
            else
            {
                mainLightIntensityLumens = null;
            }

            if (args.lightEstimation.ambientSphericalHarmonics.HasValue)
            {
                sphericalHarmonics          = args.lightEstimation.ambientSphericalHarmonics;
                RenderSettings.ambientMode  = AmbientMode.Skybox;
                RenderSettings.ambientProbe = sphericalHarmonics.Value;
            }
            else
            {
                sphericalHarmonics = null;
            }
        }
Code Example #19
 /// <summary>
 /// Called by ARFoundation to indicate that there is a new frame to process
 /// </summary>
 /// <param name="obj">Information about the frame.  Not used.</param>
 private void ArCameraManager_frameReceived(ARCameraFrameEventArgs obj)
 {
     ProcessLatestFrame();
 }
Code Example #20
    void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // Camera data extraction
        XRCameraImage image;

        if (!cameraManager.TryGetLatestImage(out image))
        {
            Debug.Log("Uh Oh");
            return;
        }

        Vector2            img_dim   = image.dimensions;
        XRCameraImagePlane greyscale = image.GetPlane(0);

        // Instantiates new m_Texture if necessary
        if (m_Texture == null || m_Texture.width != image.width)
        {
            var format = TextureFormat.RGBA32;
            m_Texture = new Texture2D(image.width, image.height, format, false);
        }

        // Process the image here:
        unsafe {
            IntPtr greyPtr = (IntPtr)greyscale.data.GetUnsafePtr();

            // TOUCH: Detect corners and set as source points
            if (Input.touchCount > 0)
            {
                Touch touch = Input.GetTouch(0);
                if (touch.phase == TouchPhase.Began)
                {
                    // Cache original image
                    Utils.copyToMat(greyPtr, cached_initMat);

                    Debug.Log("OFCR: ArucoDetection()");
                    // Detect reference points
                    ArucoDetection();

                    Debug.Log("OFCR: GetFaces()");
                    // Get Rectified Textures
                    GetFaces();
                    ShowFaces(img_dim);
                }
            }

            // Displays OpenCV Mat as a Texture
            Utils.matToTexture2D(outMat, m_Texture, false, 0);
        }

        // Dispose the image only after its plane data is no longer needed;
        // the buffer returned by GetPlane(0) is invalid once the image is disposed.
        image.Dispose();

        if (spa_full)
        {
            // Homography step (not yet implemented)
            // Debug.Log(" ALL SRC FOUND");
        }

        // Sets orientation of screen if necessary
        if (m_CachedOrientation == null || m_CachedOrientation != Screen.orientation)
        {
            // TODO: Debug why doesn't initiate with ConfigRawimage(). The null isn't triggering here. Print cached Orientation
            m_CachedOrientation = Screen.orientation;
            ConfigureRawImageInSpace(img_dim);
        }

        m_RawImage.texture = (Texture)m_Texture;

        // m_ImageInfo.text = string.Format("Number of Blobs: {0}", ids.size());
    }
Code Example #21
 void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
 {
     // Light estimation values are nullable; guard before reading .Value so a frame
     // without light estimation data does not throw.
     if (eventArgs.lightEstimation.averageBrightness.HasValue)
         l.intensity = eventArgs.lightEstimation.averageBrightness.Value;
     if (eventArgs.lightEstimation.averageColorTemperature.HasValue)
         l.colorTemperature = eventArgs.lightEstimation.averageColorTemperature.Value;
 }
Code Example #22
 private void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
 {
     CaptureARBuffer();
 }