private void UpdateWordResultPoses(Camera arCamera, IEnumerable <QCARManagerImpl.WordResultData> wordResults)
    {
        // Locate the scene's QCAR behaviour; it supplies the viewport and video
        // mode needed to map camera-frame coordinates into screen space.
        QCARAbstractBehaviour qcarBehaviour = (QCARAbstractBehaviour)UnityEngine.Object.FindObjectOfType(typeof(QCARAbstractBehaviour));
        if (qcarBehaviour == null)
        {
            Debug.LogError("QCAR Behaviour could not be found");
            return;
        }

        Rect viewport = qcarBehaviour.GetViewportRectangle();
        bool mirrored = qcarBehaviour.VideoBackGroundMirrored;
        CameraDevice.VideoModeData videoMode = qcarBehaviour.GetVideoMode();

        foreach (QCARManagerImpl.WordResultData wordData in wordResults)
        {
            WordResultImpl trackedWord = (WordResultImpl)this.mTrackedWords[wordData.id];

            // Transform the word pose from camera-local into world space.
            Vector3 worldPosition = arCamera.transform.TransformPoint(wordData.pose.position);
            Quaternion worldRotation = (arCamera.transform.rotation * wordData.pose.orientation) * Quaternion.AngleAxis(270f, Vector3.left);

            trackedWord.SetPose(worldPosition, worldRotation);
            trackedWord.SetStatus(wordData.status);

            OrientedBoundingBox obb = new OrientedBoundingBox(wordData.orientedBoundingBox.center,
                                                              wordData.orientedBoundingBox.halfExtents,
                                                              wordData.orientedBoundingBox.rotation);
            trackedWord.SetObb(QCARRuntimeUtilities.CameraFrameToScreenSpaceCoordinates(obb, viewport, mirrored, videoMode));
        }

        // Keep duplicated word prefabs in sync when that creation mode is active.
        if (this.mWordPrefabCreationMode == WordPrefabCreationMode.DUPLICATE)
        {
            this.UpdateWordBehaviourPoses();
        }
    }
 internal void Update()
 {
     // Web-cam emulation only runs inside the Unity editor (play mode).
     if (!QCARRuntimeUtilities.IsPlayMode())
     {
         return;
     }

     // Publish size-dependent configuration once the first frame has arrived.
     if (!this.IsTextureSizeAvailable && this.mWebCamTexture.DidUpdateThisFrame)
     {
         QCARRenderer.Vec2I size = this.mWebCamProfile.ResampledTextureSize;

         this.mVideoModeData = new CameraDevice.VideoModeData
         {
             width     = size.x,
             height    = size.y,
             frameRate = this.mWebCamProfile.RequestedFPS
         };

         this.mVideoTextureInfo = new QCARRenderer.VideoTextureInfo
         {
             imageSize   = size,
             textureSize = size
         };

         this.mTextureRenderer       = new TextureRenderer(this.mWebCamTexture.Texture, this.mRenderTextureLayer, size);
         this.mBufferReadTexture     = new Texture2D(size.x, size.y);
         this.mReadPixelsRect        = new Rect(0f, 0f, (float)size.x, (float)size.y);
         this.IsTextureSizeAvailable = true;
     }

     // Toggle the background-rendering behaviour to match the renderer flag.
     this.mBgRenderingTexBehaviour.CheckAndSetActive(QCARRenderer.Instance.DrawVideoBackground);
 }
// Example #3
    /// <summary>
    /// Queries the native text tracker for its current detection and tracking
    /// regions (camera-frame pixels) and converts them to screen-space rects.
    /// Returns false (with empty rects) when the QCAR behaviour is missing.
    /// </summary>
    public override bool GetRegionOfInterest(out Rect detectionRegion, out Rect trackingRegion)
    {
        QCARAbstractBehaviour behaviour = (QCARAbstractBehaviour)UnityEngine.Object.FindObjectOfType(typeof(QCARAbstractBehaviour));

        if (behaviour == null)
        {
            Debug.LogError("QCAR Behaviour could not be found");
            detectionRegion = new Rect();
            trackingRegion  = new Rect();
            return(false);
        }
        Rect viewportRectangle       = behaviour.GetViewportRectangle();
        bool videoBackGroundMirrored = behaviour.VideoBackGroundMirrored;

        CameraDevice.VideoModeData videoMode = CameraDevice.Instance.GetVideoMode(behaviour.CameraDeviceMode);

        // Unmanaged buffers for the two rects marshalled out of native code.
        IntPtr detectionROI = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(RectangleIntData)));
        IntPtr trackingROI  = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(RectangleIntData)));
        RectangleIntData detectionData;
        RectangleIntData trackingData;
        try
        {
            QCARWrapper.Instance.TextTrackerGetRegionOfInterest(detectionROI, trackingROI);
            detectionData = (RectangleIntData)Marshal.PtrToStructure(detectionROI, typeof(RectangleIntData));
            trackingData  = (RectangleIntData)Marshal.PtrToStructure(trackingROI, typeof(RectangleIntData));
        }
        finally
        {
            // Always release the unmanaged memory, even if the native call or
            // marshalling throws (previously this leaked on exception).
            Marshal.FreeHGlobal(detectionROI);
            Marshal.FreeHGlobal(trackingROI);
        }
        detectionRegion = this.ScreenSpaceRectFromCamSpaceRectData(detectionData, viewportRectangle, videoBackGroundMirrored, videoMode);
        trackingRegion  = this.ScreenSpaceRectFromCamSpaceRectData(trackingData, viewportRectangle, videoBackGroundMirrored, videoMode);
        return(true);
    }
// Example #4
    // wait for the first web cam frame to set config structs with correct size values
    // (mWebCamTexture.width and height are set to default values before the first frame is captured)
    /// <summary>
    /// Per-frame update for the editor web-cam emulation: publishes the video
    /// mode and texture info once the first frame arrives, and keeps the
    /// background-rendering behaviour's active state in sync with the renderer.
    /// </summary>
    public void Update()
    {
        if (!IsTextureSizeAvailable && mWebCamTexture.DidUpdateThisFrame)
        {
            QCARRenderer.Vec2I resampledSize = mWebCamProfile.ResampledTextureSize;

            mVideoModeData = new CameraDevice.VideoModeData
            {
                width     = resampledSize.x,
                height    = resampledSize.y,
                frameRate = mWebCamProfile.RequestedFPS                          // real fps not known, but not used in Unity any way...
            };

            mVideoTextureInfo = new QCARRenderer.VideoTextureInfo
            {
                imageSize   = resampledSize,
                textureSize = resampledSize
            };

            // create rendering resources sized to the resampled camera frame
            mTextureRenderer   = new TextureRenderer(mWebCamTexture.Texture, mRenderTextureLayer, resampledSize);
            mBufferReadTexture = new Texture2D(resampledSize.x, resampledSize.y);
            mReadPixelsRect    = new Rect(0, 0, resampledSize.x, resampledSize.y);

            // flag set last so consumers never observe partially-initialized state
            IsTextureSizeAvailable = true;
        }

        // turn on and off BGRenderingBehaviour depending if Background Rendering has been enabled or not:
        mBgRenderingTexBehaviour.CheckAndSetActive(QCARRenderer.Instance.DrawVideoBackground);
    }
// Example #5
    /// <summary>
    /// Converts the given screen-space detection/tracking rects into
    /// camera-frame coordinates and passes them to the native text tracker.
    /// </summary>
    public override bool SetRegionOfInterest(Rect detectionRegion, Rect trackingRegion)
    {
        QCARAbstractBehaviour behaviour = (QCARAbstractBehaviour)UnityEngine.Object.FindObjectOfType(typeof(QCARAbstractBehaviour));
        if (behaviour == null)
        {
            Debug.LogError("QCAR Behaviour could not be found");
            return(false);
        }

        Rect viewport = behaviour.GetViewportRectangle();
        bool mirrored = behaviour.VideoBackGroundMirrored;
        CameraDevice.VideoModeData videoMode = CameraDevice.Instance.GetVideoMode(behaviour.CameraDeviceMode);

        // Pick the rect corners that span a rectangle in landscape-left camera space.
        Vector2 detTopLeft;
        Vector2 detBottomRight;
        Vector2 trackTopLeft;
        Vector2 trackBottomRight;
        QCARRuntimeUtilities.SelectRectTopLeftAndBottomRightForLandscapeLeft(detectionRegion, mirrored, out detTopLeft, out detBottomRight);
        QCARRuntimeUtilities.SelectRectTopLeftAndBottomRightForLandscapeLeft(trackingRegion, mirrored, out trackTopLeft, out trackBottomRight);

        // Convert all four corners into camera-frame pixel coordinates.
        QCARRenderer.Vec2I camDetTopLeft       = QCARRuntimeUtilities.ScreenSpaceToCameraFrameCoordinates(detTopLeft, viewport, mirrored, videoMode);
        QCARRenderer.Vec2I camDetBottomRight   = QCARRuntimeUtilities.ScreenSpaceToCameraFrameCoordinates(detBottomRight, viewport, mirrored, videoMode);
        QCARRenderer.Vec2I camTrackTopLeft     = QCARRuntimeUtilities.ScreenSpaceToCameraFrameCoordinates(trackTopLeft, viewport, mirrored, videoMode);
        QCARRenderer.Vec2I camTrackBottomRight = QCARRuntimeUtilities.ScreenSpaceToCameraFrameCoordinates(trackBottomRight, viewport, mirrored, videoMode);

        int nativeResult = QCARWrapper.Instance.TextTrackerSetRegionOfInterest(camDetTopLeft.x, camDetTopLeft.y, camDetBottomRight.x, camDetBottomRight.y,
                                                                               camTrackTopLeft.x, camTrackTopLeft.y, camTrackBottomRight.x, camTrackBottomRight.y,
                                                                               (int)this.CurrentUpDirection);
        if (nativeResult == 0)
        {
            Debug.LogError(string.Format("Could not set region of interest: ({0}, {1}, {2}, {3}) - ({4}, {5}, {6}, {7})", new object[] { detectionRegion.x, detectionRegion.y, detectionRegion.width, detectionRegion.height, trackingRegion.x, trackingRegion.y, trackingRegion.width, trackingRegion.height }));
            return(false);
        }
        return(true);
    }
    /// <summary>
    /// Converts a camera-frame integer rectangle into a screen-space Rect by
    /// mapping its two defining corners individually.
    /// </summary>
    private Rect ScreenSpaceRectFromCamSpaceRectData(RectangleIntData camSpaceRectData, Rect bgTextureViewPortRect,
                                                     bool isTextureMirrored, CameraDevice.VideoModeData videoModeData)
    {
        Vector2 camTopLeft     = new Vector2(camSpaceRectData.leftTopX, camSpaceRectData.leftTopY);
        Vector2 camBottomRight = new Vector2(camSpaceRectData.rightBottomX, camSpaceRectData.rightBottomY);

        Vector2 screenTopLeft     = QCARRuntimeUtilities.CameraFrameToScreenSpaceCoordinates(camTopLeft, bgTextureViewPortRect, isTextureMirrored, videoModeData);
        Vector2 screenBottomRight = QCARRuntimeUtilities.CameraFrameToScreenSpaceCoordinates(camBottomRight, bgTextureViewPortRect, isTextureMirrored, videoModeData);

        // Corner order is landscape-left; the helper reassembles a proper Rect.
        return(QCARRuntimeUtilities.CalculateRectFromLandscapeLeftCorners(screenTopLeft, screenBottomRight, isTextureMirrored));
    }
    /// <summary>
    /// Returns the video mode for the given camera device mode. In play mode
    /// the emulated web cam supplies it; on device it is marshalled from native.
    /// </summary>
    public override CameraDevice.VideoModeData GetVideoMode(CameraDevice.CameraDeviceMode mode)
    {
        if (QCARRuntimeUtilities.IsPlayMode())
        {
            return(this.WebCam.GetVideoMode());
        }
        IntPtr videoMode = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(CameraDevice.VideoModeData)));
        try
        {
            QCARWrapper.Instance.CameraDeviceGetVideoMode((int)mode, videoMode);
            return((CameraDevice.VideoModeData)Marshal.PtrToStructure(videoMode, typeof(CameraDevice.VideoModeData)));
        }
        finally
        {
            // Free the unmanaged buffer even if the native call or marshalling
            // throws (previously this leaked on exception).
            Marshal.FreeHGlobal(videoMode);
        }
    }
// Example #8
    /// <summary>
    /// Configures the size and position of the natively rendered video
    /// background so it fills the screen, then notifies registered handlers.
    /// </summary>
    public void ConfigureVideoBackground(bool forceReflectionSetting)
    {
        QCARRenderer.VideoBGCfgData config = QCARRenderer.Instance.GetVideoBackgroundConfig();
        CameraDevice.VideoModeData videoMode = CameraDevice.Instance.GetVideoMode(this.CameraDeviceModeSetting);

        // Remember whether the native renderer mirrors the background.
        this.VideoBackGroundMirrored = config.reflection == QCARRenderer.VideoBackgroundReflection.ON;

        config.enabled     = 1;
        config.synchronous = this.SynchronousVideo ? 1 : 0;
        config.position    = new QCARRenderer.Vec2I(0, 0);

        // The reflection setting can only be forced on device, not in the editor.
        if (!QCARRuntimeUtilities.IsPlayMode() && forceReflectionSetting)
        {
            config.reflection = this.MirrorVideoBackground;
        }

        // The editor only supports a landscape viewport.
        bool landscape = QCARRuntimeUtilities.IsPlayMode() || (Screen.width > Screen.height);

        if (landscape)
        {
            // Scale the video to fill the screen width first.
            float scaledHeight = videoMode.height * (((float)Screen.width) / ((float)videoMode.width));
            config.size = new QCARRenderer.Vec2I(Screen.width, (int)scaledHeight);
            if (config.size.y < Screen.height)
            {
                // Aspect ratios mismatch: fill the screen height instead.
                config.size.x = (int)(Screen.height * (((float)videoMode.width) / ((float)videoMode.height)));
                config.size.y = Screen.height;
            }
        }
        else
        {
            // Scale the video to fill the screen height first.
            float scaledWidth = videoMode.height * (((float)Screen.height) / ((float)videoMode.width));
            config.size = new QCARRenderer.Vec2I((int)scaledWidth, Screen.height);
            if (config.size.x < Screen.width)
            {
                // Aspect ratios mismatch: fill the screen width instead.
                config.size.x = Screen.width;
                config.size.y = (int)(Screen.width * (((float)videoMode.width) / ((float)videoMode.height)));
            }
        }

        QCARRenderer.Instance.SetVideoBackgroundConfig(config);

        // Center the (possibly larger-than-screen) background and cache the viewport.
        int viewportX = config.position.x + ((Screen.width - config.size.x) / 2);
        int viewportY = config.position.y + ((Screen.height - config.size.y) / 2);
        this.mViewportRect = new Rect((float)viewportX, (float)viewportY, (float)config.size.x, (float)config.size.y);

        foreach (IVideoBackgroundEventHandler handler in this.mVideoBgEventHandlers)
        {
            handler.OnVideoBackgroundConfigChanged();
        }
    }
// Example #9
    // Configure the size and position of the video background rendered
    // natively when QCARManager.DrawVideoBackground is true
    private void ConfigureVideoBackground()
    {
        QCARRenderer.VideoBGCfgData config    = QCARRenderer.Instance.GetVideoBackgroundConfig();
        CameraDevice.VideoModeData  videoMode = CameraDevice.Instance.GetVideoMode(CameraDeviceModeSetting);

        // enable native rendering and anchor the background at the origin
        config.enabled     = 1;
        config.synchronous = (SynchronousVideo ? 1 : 0);
        config.position    = new QCARRenderer.Vec2I(0, 0);

        if (Screen.width > Screen.height)
        {
            // landscape: scale the video to fill the screen width first
            float height = videoMode.height * (Screen.width / (float)
                                               videoMode.width);
            config.size = new QCARRenderer.Vec2I(Screen.width, (int)height);

            if (config.size.y < Screen.height)
            {
                // Correcting rendering background size to handle missmatch
                // between screen and video aspect ratios
                config.size.x = (int)(Screen.height
                                      * (videoMode.width / (float)videoMode.height));
                config.size.y = Screen.height;
            }
        }
        else
        {
            // portrait: scale the video to fill the screen height first
            float width = videoMode.height * (Screen.height / (float)
                                              videoMode.width);
            config.size = new QCARRenderer.Vec2I((int)width, Screen.height);

            if (config.size.x < Screen.width)
            {
                // Correcting rendering background size to handle missmatch
                // between screen and video aspect ratios
                config.size.x = Screen.width;
                config.size.y = (int)(Screen.width *
                                      (videoMode.width / (float)videoMode.height));
            }
        }

        QCARRenderer.Instance.SetVideoBackgroundConfig(config);

        // center the (possibly larger-than-screen) background on the screen
        int viewportX = config.position.x + (Screen.width - config.size.x) / 2;
        int viewportY = config.position.y + (Screen.height - config.size.y) / 2;

        mViewportRect = new Rect(viewportX, viewportY,
                                 config.size.x, config.size.y);
    }
    /// <summary>
    /// Updates pose, status and screen-space bounding box of every tracked
    /// word from the given native word results.
    /// </summary>
    private void UpdateWordResultPoses(Camera arCamera, IEnumerable <QCARManagerImpl.WordResultData> wordResults)
    {
        QCARBehaviour qcarbehaviour = (QCARBehaviour)Object.FindObjectOfType(typeof(QCARBehaviour));

        if (qcarbehaviour == null)
        {
            Debug.LogError("QCAR Behaviour could not be found");
            return;
        }

        // required information to transform camera frame coordinates into screen space coordinates:
        Rect bgTextureViewPortRect = qcarbehaviour.GetViewportRectangle();
        bool isMirrored            = qcarbehaviour.VideoBackGroundMirrored;

        CameraDevice.VideoModeData videoModeData = qcarbehaviour.GetVideoMode();

        foreach (var wrd in wordResults)
        {
            var wordResult = (WordResultImpl)mTrackedWords[wrd.id];

            // word pose is given relative to the AR camera; lift into world space
            var position = arCamera.transform.TransformPoint(wrd.pose.position);

            var wrdOrientation = wrd.pose.orientation;
            // extra 270-degree rotation about the left axis — presumably aligns the
            // word plane with Unity's orientation convention; TODO confirm
            var rotation       = arCamera.transform.rotation *
                                 wrdOrientation *
                                 Quaternion.AngleAxis(270, Vector3.left);

            wordResult.SetPose(position, rotation);
            wordResult.SetStatus(wrd.status);

            var obb = new OrientedBoundingBox(wrd.orientedBoundingBox.center, wrd.orientedBoundingBox.halfExtents,
                                              wrd.orientedBoundingBox.rotation);
            wordResult.SetObb(QCARRuntimeUtilities.CameraFrameToScreenSpaceCoordinates(obb, bgTextureViewPortRect,
                                                                                       isMirrored, videoModeData));
        }

        // update word behaviours if enabled:
        if (mWordPrefabCreationMode == WordPrefabCreationMode.DUPLICATE)
        {
            UpdateWordBehaviourPoses();
        }
    }
    /// <summary>
    /// Defines the areas of the image in screen coordinates where text can be detected and tracked.
    /// </summary>
    /// <param name="detectionRegion">Screen-space rect where text detection runs.</param>
    /// <param name="trackingRegion">Screen-space rect where detected text is tracked.</param>
    /// <returns>False if the QCAR behaviour is missing or the native call reports failure.</returns>
    public override bool SetRegionOfInterest(Rect detectionRegion, Rect trackingRegion)
    {
        QCARBehaviour qcarbehaviour = (QCARBehaviour)Object.FindObjectOfType(typeof(QCARBehaviour));

        if (qcarbehaviour == null)
        {
            Debug.LogError("QCAR Behaviour could not be found");
            return(false);
        }

        // required information to transform screen space coordinates into camera frame coordinates:
        Rect bgTextureViewPortRect = qcarbehaviour.GetViewportRectangle();
        bool isMirrored            = qcarbehaviour.VideoBackGroundMirrored;

        CameraDevice.VideoModeData videoModeData = CameraDevice.Instance.GetVideoMode(qcarbehaviour.CameraDeviceMode);

        // depending on the current orientation, different corner points of the rect have to be taken
        // - they need to span a rectangle in the camera frame coordinate system
        Vector2 detectionLeftTop, detectionRightBottom, trackingLeftTop, trackingRightBottom;

        QCARRuntimeUtilities.SelectRectTopLeftAndBottomRightForLandscapeLeft(detectionRegion, isMirrored, out detectionLeftTop, out detectionRightBottom);
        QCARRuntimeUtilities.SelectRectTopLeftAndBottomRightForLandscapeLeft(trackingRegion, isMirrored, out trackingLeftTop, out trackingRightBottom);

        // transform the coordinates into camera frame coord system
        QCARRenderer.Vec2I camFrameDetectionLeftTop     = QCARRuntimeUtilities.ScreenSpaceToCameraFrameCoordinates(detectionLeftTop, bgTextureViewPortRect, isMirrored, videoModeData);
        QCARRenderer.Vec2I camFrameDetectionRightBottom = QCARRuntimeUtilities.ScreenSpaceToCameraFrameCoordinates(detectionRightBottom, bgTextureViewPortRect, isMirrored, videoModeData);
        QCARRenderer.Vec2I camFrameTrackingLeftTop      = QCARRuntimeUtilities.ScreenSpaceToCameraFrameCoordinates(trackingLeftTop, bgTextureViewPortRect, isMirrored, videoModeData);
        QCARRenderer.Vec2I camFrameTrackingRightBottom  = QCARRuntimeUtilities.ScreenSpaceToCameraFrameCoordinates(trackingRightBottom, bgTextureViewPortRect, isMirrored, videoModeData);

        // native call returns 0 on failure
        if (QCARWrapper.Instance.TextTrackerSetRegionOfInterest(camFrameDetectionLeftTop.x, camFrameDetectionLeftTop.y, camFrameDetectionRightBottom.x, camFrameDetectionRightBottom.y,
                                                                camFrameTrackingLeftTop.x, camFrameTrackingLeftTop.y, camFrameTrackingRightBottom.x, camFrameTrackingRightBottom.y, (int)CurrentUpDirection) == 0)
        {
            Debug.LogError(string.Format("Could not set region of interest: ({0}, {1}, {2}, {3}) - ({4}, {5}, {6}, {7})",
                                         detectionRegion.x, detectionRegion.y, detectionRegion.width, detectionRegion.height,
                                         trackingRegion.x, trackingRegion.y, trackingRegion.width, trackingRegion.height));
            return(false);
        }

        return(true);
    }
    /// <summary>
    /// Returns the areas of the image in screen coordinates where text can be detected and tracked.
    /// </summary>
    /// <param name="detectionRegion">Receives the screen-space detection rect.</param>
    /// <param name="trackingRegion">Receives the screen-space tracking rect.</param>
    /// <returns>False (with empty rects) when the QCAR behaviour is missing.</returns>
    public override bool GetRegionOfInterest(out Rect detectionRegion, out Rect trackingRegion)
    {
        QCARBehaviour qcarbehaviour = (QCARBehaviour)Object.FindObjectOfType(typeof(QCARBehaviour));

        if (qcarbehaviour == null)
        {
            Debug.LogError("QCAR Behaviour could not be found");
            detectionRegion = new Rect();
            trackingRegion  = new Rect();
            return(false);
        }

        // required information to transform camera frame to screen space coordinates:
        Rect bgTextureViewPortRect = qcarbehaviour.GetViewportRectangle();
        bool isMirrored            = qcarbehaviour.VideoBackGroundMirrored;

        CameraDevice.VideoModeData videoModeData = CameraDevice.Instance.GetVideoMode(qcarbehaviour.CameraDeviceMode);

        IntPtr detectionROIptr = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(RectangleIntData)));
        IntPtr trackingROIptr  = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(RectangleIntData)));

        RectangleIntData detectionROIcamSpace;
        RectangleIntData trackingROIcamSpace;
        try
        {
            // get current region of interest from native
            QCARWrapper.Instance.TextTrackerGetRegionOfInterest(detectionROIptr, trackingROIptr);

            detectionROIcamSpace = (RectangleIntData)Marshal.PtrToStructure(detectionROIptr, typeof(RectangleIntData));
            trackingROIcamSpace  = (RectangleIntData)Marshal.PtrToStructure(trackingROIptr, typeof(RectangleIntData));
        }
        finally
        {
            // always release the unmanaged buffers, even if the native call or
            // marshalling throws (previously this leaked on exception)
            Marshal.FreeHGlobal(detectionROIptr);
            Marshal.FreeHGlobal(trackingROIptr);
        }

        // calculate screen space rect for detection and tracking regions:
        detectionRegion = ScreenSpaceRectFromCamSpaceRectData(detectionROIcamSpace, bgTextureViewPortRect, isMirrored, videoModeData);
        trackingRegion  = ScreenSpaceRectFromCamSpaceRectData(trackingROIcamSpace, bgTextureViewPortRect, isMirrored, videoModeData);

        return(true);
    }
    /// <summary>
    /// Configure the size and position of the video background rendered
    /// natively when QCARManager.DrawVideoBackground is true
    /// </summary>
    /// <param name="forceReflectionSetting">When true (and not in play mode),
    /// overwrites the native reflection setting with MirrorVideoBackground.</param>
    public void ConfigureVideoBackground(bool forceReflectionSetting)
    {
        QCARRenderer.VideoBGCfgData config    = QCARRenderer.Instance.GetVideoBackgroundConfig();
        CameraDevice.VideoModeData  videoMode = CameraDevice.Instance.GetVideoMode(CameraDeviceModeSetting);

        // remember whether the native renderer currently mirrors the background
        VideoBackGroundMirrored = config.reflection == QCARRenderer.VideoBackgroundReflection.ON;

        config.enabled     = 1;
        config.synchronous = (SynchronousVideo ? 1 : 0);
        config.position    = new QCARRenderer.Vec2I(0, 0);
        if (!QCARRuntimeUtilities.IsPlayMode())
        {
            // set the reflection parameter to the configured value (only on device)
            if (forceReflectionSetting)
            {
                config.reflection = MirrorVideoBackground;
            }
        }

        bool isLandscapeViewPort = Screen.width > Screen.height;

        if (QCARRuntimeUtilities.IsPlayMode())
        {
            isLandscapeViewPort = true; //editor only support landscape viewport
        }
        if (isLandscapeViewPort)
        {
            // landscape: scale the video to fill the screen width first
            float height = videoMode.height * (Screen.width / (float)
                                               videoMode.width);
            config.size = new QCARRenderer.Vec2I(Screen.width, (int)height);

            if (config.size.y < Screen.height)
            {
                // Correcting rendering background size to handle missmatch
                // between screen and video aspect ratios
                config.size.x = (int)(Screen.height
                                      * (videoMode.width / (float)videoMode.height));
                config.size.y = Screen.height;
            }
        }
        else
        {
            // portrait: scale the video to fill the screen height first
            float width = videoMode.height * (Screen.height / (float)
                                              videoMode.width);
            config.size = new QCARRenderer.Vec2I((int)width, Screen.height);

            if (config.size.x < Screen.width)
            {
                // Correcting rendering background size to handle missmatch
                // between screen and video aspect ratios
                config.size.x = Screen.width;
                config.size.y = (int)(Screen.width *
                                      (videoMode.width / (float)videoMode.height));
            }
        }

        QCARRenderer.Instance.SetVideoBackgroundConfig(config);

        // center the (possibly larger-than-screen) background on the screen
        int viewportX = config.position.x + (Screen.width - config.size.x) / 2;
        int viewportY = config.position.y + (Screen.height - config.size.y) / 2;

        mViewportRect = new Rect(viewportX, viewportY,
                                 config.size.x, config.size.y);

        // notify listeners that the background layout changed
        foreach (IVideoBackgroundEventHandler handler in mVideoBgEventHandlers)
        {
            handler.OnVideoBackgroundConfigChanged();
        }
    }
    // Wait for the first web cam frame to set config structs with correct size
    // values (mWebCamTexture.width/height hold defaults before the first frame).
    public void Update()
    {
        if (!IsTextureSizeAvailable && mWebCamTexture.DidUpdateThisFrame)
        {
            QCARRenderer.Vec2I frameSize = mWebCamProfile.ResampledTextureSize;

            mVideoModeData = new CameraDevice.VideoModeData
            {
                width     = frameSize.x,
                height    = frameSize.y,
                frameRate = mWebCamProfile.RequestedFPS // real fps unknown, but unused in Unity
            };

            mVideoTextureInfo = new QCARRenderer.VideoTextureInfo
            {
                imageSize   = frameSize,
                textureSize = frameSize
            };

            // create rendering resources sized to the resampled camera frame
            mTextureRenderer   = new TextureRenderer(mWebCamTexture.Texture, mRenderTextureLayer, frameSize);
            mBufferReadTexture = new Texture2D(frameSize.x, frameSize.y);
            mReadPixelsRect    = new Rect(0, 0, frameSize.x, frameSize.y);

            IsTextureSizeAvailable = true;
        }

        // enable/disable BGRenderingBehaviour to match the renderer's background flag
        mBgRenderingTexBehaviour.CheckAndSetActive(QCARRenderer.Instance.DrawVideoBackground);
    }
// Example #15
 /// <summary>
 /// Runs one QCAR frame update: feeds the native tracker, marshals the
 /// resulting frame state back, and updates trackables and the video
 /// background. Returns false when paused or when the native update fails.
 /// NOTE(review): statement order matters here (native update before
 /// marshalling, trackable init before tracker update) — do not reorder.
 /// </summary>
 internal bool Update(ScreenOrientation counterRotation, CameraDevice.CameraDeviceMode deviceMode, ref CameraDevice.VideoModeData videoMode)
 {
     // while paused, only tick the native pause handler
     if (this.mPaused)
     {
         QCARWrapper.Instance.PausedUpdateQCAR();
         return(false);
     }
     // without native QCAR (editor fallback), only editor trackables update
     if (!QCARRuntimeUtilities.IsQCAREnabled())
     {
         this.UpdateTrackablesEditor();
         return(true);
     }
     this.UpdateImageContainer();
     // in play mode, push the emulated web cam frame into the native pipeline
     if (QCARRuntimeUtilities.IsPlayMode())
     {
         CameraDeviceImpl instance = (CameraDeviceImpl)CameraDevice.Instance;
         if (instance.WebCam.DidUpdateThisFrame)
         {
             this.InjectCameraFrame();
         }
     }
     // native update; 0 indicates failure
     if (QCARWrapper.Instance.UpdateQCAR(this.mImageHeaderData, this.mNumImageHeaders, this.mLastProcessedFrameStatePtr, (int)counterRotation, (int)deviceMode) == 0)
     {
         return(false);
     }
     // marshal the processed frame state out of unmanaged memory
     this.mFrameState = (FrameState)Marshal.PtrToStructure(this.mLastProcessedFrameStatePtr, typeof(FrameState));
     if (QCARRuntimeUtilities.IsPlayMode())
     {
         videoMode = CameraDevice.Instance.GetVideoMode(deviceMode);
     }
     else
     {
         // on device the frame state carries a pointer to the video mode struct
         IntPtr videoModeData = this.mFrameState.videoModeData;
         videoMode = (CameraDevice.VideoModeData)Marshal.PtrToStructure(videoModeData, typeof(CameraDevice.VideoModeData));
     }
     this.InitializeTrackableContainer(this.mFrameState.numTrackableResults);
     this.UpdateCameraFrame();
     this.UpdateTrackers(this.mFrameState);
     if (QCARRuntimeUtilities.IsPlayMode())
     {
         CameraDeviceImpl impl2 = (CameraDeviceImpl)CameraDevice.Instance;
         impl2.WebCam.SetFrameIndex(this.mFrameState.frameIndex);
     }
     // when Unity is not drawing the background, let native render it
     if (!this.mDrawVideobackground)
     {
         this.RenderVideoBackgroundOrDrawIntoTextureInNative();
     }
     return(true);
 }
    /// <summary>
    /// Calculates the screen space parameters for an oriented bounding box (center, half extents, rotation) specified in camera frame coordinates.
    /// The calculation is based on the current screen orientation.
    /// </summary>
    public static OrientedBoundingBox CameraFrameToScreenSpaceCoordinates(OrientedBoundingBox cameraFrameObb, Rect bgTextureViewPortRect, bool isTextureMirrored, CameraDevice.VideoModeData videoModeData)
    {
        // Extra rotation (degrees) and axis swap implied by the screen orientation.
        float orientationOffsetDegrees;
        bool portraitMode;
        switch (QCARRuntimeUtilities.ScreenOrientation)
        {
        case ScreenOrientation.Portrait:
            orientationOffsetDegrees = 90.0f;
            portraitMode = true;
            break;

        case ScreenOrientation.LandscapeRight:
            orientationOffsetDegrees = 180.0f;
            portraitMode = false;
            break;

        case ScreenOrientation.PortraitUpsideDown:
            orientationOffsetDegrees = 270.0f;
            portraitMode = true;
            break;

        default:
            orientationOffsetDegrees = 0.0f;
            portraitMode = false;
            break;
        }

        // In portrait the video frame axes are swapped relative to the viewport.
        float scaleX = bgTextureViewPortRect.width / (portraitMode ? videoModeData.height : videoModeData.width);
        float scaleY = bgTextureViewPortRect.height / (portraitMode ? videoModeData.width : videoModeData.height);

        Vector2 screenCenter = CameraFrameToScreenSpaceCoordinates(cameraFrameObb.Center, bgTextureViewPortRect,
                                                                   isTextureMirrored, videoModeData);
        Vector2 screenHalfExtents = new Vector2(cameraFrameObb.HalfExtents.x * scaleX, cameraFrameObb.HalfExtents.y * scaleY);

        // Mirroring flips the rotation sense; convert radians to degrees and
        // apply the orientation offset.
        float rotationRadians = isTextureMirrored ? -cameraFrameObb.Rotation : cameraFrameObb.Rotation;
        float rotationDegrees = rotationRadians * 180.0f / Mathf.PI + orientationOffsetDegrees;

        return(new OrientedBoundingBox(screenCenter, screenHalfExtents, rotationDegrees));
    }
    /// <summary>
    /// Calculates a position in screen space coordinates based on the current orientation and background config for a given screen-space position
    /// </summary>
    /// <param name="cameraFrameCoordinate">Position in camera-frame pixels.</param>
    /// <param name="bgTextureViewPortRect">Screen-space viewport of the video background.</param>
    /// <param name="isTextureMirrored">Whether the video background is mirrored.</param>
    /// <param name="videoModeData">Supplies the camera frame width/height.</param>
    public static Vector2 CameraFrameToScreenSpaceCoordinates(Vector2 cameraFrameCoordinate, Rect bgTextureViewPortRect, bool isTextureMirrored, CameraDevice.VideoModeData videoModeData)
    {
        float viewportOrigX = bgTextureViewPortRect.xMin;
        float viewportOrigY = bgTextureViewPortRect.yMin;
        float viewportSizeX = bgTextureViewPortRect.width;
        float viewportSizeY = bgTextureViewPortRect.height;

        bool isPortrait = false;

        float textureSizeX = videoModeData.width;
        float textureSizeY = videoModeData.height;

        // offset/sign factors that encode orientation and mirroring; filled in
        // by PrepareCoordinateConversion below
        float prefixX = 0.0f;
        float prefixY = 0.0f;

        float inversionMultiplierX = 0.0f;
        float inversionMultiplierY = 0.0f;

        PrepareCoordinateConversion(isTextureMirrored, ref prefixX, ref prefixY, ref inversionMultiplierX, ref inversionMultiplierY, ref isPortrait);

        // normalize the coordinates within viewport between 0 and 1
        float normalizedCoordX = (cameraFrameCoordinate.x / textureSizeX - prefixX) / inversionMultiplierX;
        float normalizedCoordY = (cameraFrameCoordinate.y / textureSizeY - prefixY) / inversionMultiplierY;

        Vector2 result;

        // convert from screen coordinates to texture coordinates
        // (in portrait the x/y axes are swapped relative to the viewport)
        if (isPortrait)
        {
            result = new Vector2(viewportSizeX * normalizedCoordY + viewportOrigX,
                                 viewportSizeY * normalizedCoordX + viewportOrigY);
        }
        else
        {
            result = new Vector2(viewportSizeX * normalizedCoordX + viewportOrigX,
                                 viewportSizeY * normalizedCoordY + viewportOrigY);
        }

        return(result);
    }
    /// <summary>
    /// Calculates a position in camera frame coordinates based on the current orientation and background config for a given screen-space position
    /// </summary>
    public static QCARRenderer.Vec2I ScreenSpaceToCameraFrameCoordinates(Vector2 screenSpaceCoordinate, Rect bgTextureViewPortRect, bool isTextureMirrored, CameraDevice.VideoModeData videoModeData)
    {
        float frameWidth  = videoModeData.width;
        float frameHeight = videoModeData.height;

        // Offset/sign factors encoding orientation and mirroring, filled in by
        // the shared conversion-preparation helper.
        bool portraitMode = false;
        float offsetX = 0.0f;
        float offsetY = 0.0f;
        float signX = 0.0f;
        float signY = 0.0f;
        PrepareCoordinateConversion(isTextureMirrored, ref offsetX, ref offsetY, ref signX, ref signY, ref portraitMode);

        // Normalize the screen position to [0, 1] within the viewport.
        float normX = (screenSpaceCoordinate.x - bgTextureViewPortRect.xMin) / bgTextureViewPortRect.width;
        float normY = (screenSpaceCoordinate.y - bgTextureViewPortRect.yMin) / bgTextureViewPortRect.height;

        // In portrait the axes are swapped before scaling up to frame pixels.
        if (portraitMode)
        {
            return(new QCARRenderer.Vec2I(Mathf.RoundToInt((offsetX + (signX * normY)) * frameWidth),
                                          Mathf.RoundToInt((offsetY + (signY * normX)) * frameHeight)));
        }
        return(new QCARRenderer.Vec2I(Mathf.RoundToInt((offsetX + (signX * normX)) * frameWidth),
                                      Mathf.RoundToInt((offsetY + (signY * normY)) * frameHeight)));
    }
// Beispiel #19
// 0
    public static OrientedBoundingBox CameraFrameToScreenSpaceCoordinates(OrientedBoundingBox cameraFrameObb, Rect bgTextureViewPortRect, bool isTextureMirrored, CameraDevice.VideoModeData videoModeData)
    {
        // Extra rotation (in degrees) and axis swap required by the current
        // screen orientation.
        bool portrait = false;
        float rotationOffset = 0f;

        switch (ScreenOrientation)
        {
        case UnityEngine.ScreenOrientation.Portrait:
            rotationOffset += 90f;
            portrait = true;
            break;

        case UnityEngine.ScreenOrientation.PortraitUpsideDown:
            rotationOffset += 270f;
            portrait = true;
            break;

        case UnityEngine.ScreenOrientation.LandscapeRight:
            rotationOffset += 180f;
            break;
        }

        // Scale factors from camera-frame pixels to viewport pixels; in a
        // portrait orientation the video texture is rotated, so the viewport
        // width maps against the texture height and vice versa.
        float scaleX = bgTextureViewPortRect.width / (portrait ? ((float)videoModeData.height) : ((float)videoModeData.width));
        float scaleY = bgTextureViewPortRect.height / (portrait ? ((float)videoModeData.width) : ((float)videoModeData.height));

        Vector2 center = CameraFrameToScreenSpaceCoordinates(cameraFrameObb.Center, bgTextureViewPortRect, isTextureMirrored, videoModeData);
        Vector2 halfExtents = new Vector2(cameraFrameObb.HalfExtents.x * scaleX, cameraFrameObb.HalfExtents.y * scaleY);

        // Mirroring flips the rotation direction.
        float rotation = isTextureMirrored ? -cameraFrameObb.Rotation : cameraFrameObb.Rotation;

        // Convert the OBB rotation from radians to degrees and add the
        // screen-orientation offset.
        return new OrientedBoundingBox(center, halfExtents, ((rotation * 180f) / 3.141593f) + rotationOffset);
    }
// Beispiel #20
// 0
    public static Vector2 CameraFrameToScreenSpaceCoordinates(Vector2 cameraFrameCoordinate, Rect bgTextureViewPortRect, bool isTextureMirrored, CameraDevice.VideoModeData videoModeData)
    {
        // Per-axis offset/scale and the portrait flag are filled in by
        // PrepareCoordinateConversion for the current orientation and
        // mirroring state.
        bool portrait = false;
        float offsetX = 0f;
        float offsetY = 0f;
        float scaleX = 0f;
        float scaleY = 0f;

        PrepareCoordinateConversion(isTextureMirrored, ref offsetX, ref offsetY, ref scaleX, ref scaleY, ref portrait);

        // Normalize the camera-frame coordinate against the video texture size.
        float normX = ((cameraFrameCoordinate.x / videoModeData.width) - offsetX) / scaleX;
        float normY = ((cameraFrameCoordinate.y / videoModeData.height) - offsetY) / scaleY;

        // Map into the viewport rectangle; portrait orientations swap the axes.
        float u = portrait ? normY : normX;
        float v = portrait ? normX : normY;
        return new Vector2((bgTextureViewPortRect.width * u) + bgTextureViewPortRect.xMin,
                           (bgTextureViewPortRect.height * v) + bgTextureViewPortRect.yMin);
    }
    /// <summary>
    /// Processes the camera image and tracking data for this frame: updates the
    /// native QCAR state (or replays the last frame while paused), reads back the
    /// frame state and video mode, and forwards the results to the trackers.
    /// </summary>
    /// <param name="counterRotation">Screen orientation passed to native QCAR to counter-rotate the video background.</param>
    /// <param name="deviceMode">Camera device mode used when querying the video mode in play mode.</param>
    /// <param name="videoMode">Receives the video mode data for this frame.</param>
    public void Update(ScreenOrientation counterRotation, CameraDevice.CameraDeviceMode deviceMode, ref CameraDevice.VideoModeData videoMode)
    {
        if (mPaused)
        {
            // set the last frame again, do not update the state
            // (the previously marshalled mFrameState is reused below)
            QCARWrapper.Instance.PausedUpdateQCAR();
        }
        else
        {
            // enable "fake tracking" if running in the free editor version
            // that does not support native plugins
            if (!QCARRuntimeUtilities.IsQCAREnabled())
            {
                UpdateTrackablesEditor();
                return;
            }

            // Prepare the camera image container
            UpdateImageContainer();

            if (QCARRuntimeUtilities.IsPlayMode())
            {
                // In play mode frames come from a webcam; only push a frame
                // into native when the webcam actually produced a new one.
                CameraDeviceImpl cameraDeviceImpl = (CameraDeviceImpl)CameraDevice.Instance;
                if (cameraDeviceImpl.WebCam.DidUpdateThisFrame)
                {
                    InjectCameraFrame();
                }
            }

            // Draw the video background or update the video texture
            // Also retrieve registered camera images for this frame
            QCARWrapper.Instance.UpdateQCAR(mImageHeaderData, mNumImageHeaders,
                                            mLastProcessedFrameStatePtr, (int)counterRotation,
                                            (int)deviceMode);

            // Copy the native frame state into managed memory for this frame.
            mFrameState = (FrameState)Marshal.PtrToStructure(mLastProcessedFrameStatePtr, typeof(FrameState));
        }


        // Get video mode data
        if (QCARRuntimeUtilities.IsPlayMode())
        {
            videoMode = CameraDevice.Instance.GetVideoMode(deviceMode);
        }
        else
        {
            // On device the video mode is marshalled out of the native frame state.
            var videoModePtr = mFrameState.videoModeData;
            videoMode =
                (CameraDevice.VideoModeData)
                Marshal.PtrToStructure(videoModePtr, typeof(CameraDevice.VideoModeData));
        }


        // Reinitialize the trackable data container if required:
        InitializeTrackableContainer(mFrameState.numTrackableResults);

        // Handle the camera image data
        UpdateCameraFrame();

        // Handle the trackable data
        UpdateTrackers(mFrameState);

        if (QCARRuntimeUtilities.IsPlayMode())
        {
            // read out the index of the last processed frame
            CameraDeviceImpl cameraDeviceImpl = (CameraDeviceImpl)CameraDevice.Instance;
            cameraDeviceImpl.WebCam.SetFrameIndex(mFrameState.frameIndex);
        }

        // if native video background rendering is disabled, we need to call the method to draw into the
        // offscreen texture here in the update loop.
        // rendering the video background into the texture in the PrepareRendering callback will result
        // in the texture updated in the next frame, which results in a lag between augmentations and the
        // video background
        if (!mDrawVideobackground)
        {
            RenderVideoBackgroundOrDrawIntoTextureInNative();
        }
    }
// Beispiel #22
// 0
    public static QCARRenderer.Vec2I ScreenSpaceToCameraFrameCoordinates(Vector2 screenSpaceCoordinate, Rect bgTextureViewPortRect, bool isTextureMirrored, CameraDevice.VideoModeData videoModeData)
    {
        // Per-axis offset/scale and the portrait flag are filled in by
        // PrepareCoordinateConversion for the current orientation and
        // mirroring state.
        bool portrait = false;
        float offsetX = 0f;
        float offsetY = 0f;
        float scaleX = 0f;
        float scaleY = 0f;

        PrepareCoordinateConversion(isTextureMirrored, ref offsetX, ref offsetY, ref scaleX, ref scaleY, ref portrait);

        // Normalize the screen-space coordinate to [0, 1] within the viewport.
        float normX = (screenSpaceCoordinate.x - bgTextureViewPortRect.xMin) / bgTextureViewPortRect.width;
        float normY = (screenSpaceCoordinate.y - bgTextureViewPortRect.yMin) / bgTextureViewPortRect.height;

        // Portrait orientations swap the axes between screen space and the
        // camera frame.
        float u = portrait ? normY : normX;
        float v = portrait ? normX : normY;

        // Scale back up to texture pixels, rounding to integer coordinates.
        return new QCARRenderer.Vec2I(Mathf.RoundToInt((offsetX + (scaleX * u)) * videoModeData.width),
                                      Mathf.RoundToInt((offsetY + (scaleY * v)) * videoModeData.height));
    }