Example #1
    // wait for the first webcam frame before filling the config structs with correct size values
    // (mWebCamTexture.width and height hold default placeholder values until the first frame is captured)
    public void Update()
    {
        if (!IsTextureSizeAvailable && mWebCamTexture.DidUpdateThisFrame)
        {
            QCARRenderer.Vec2I resampledSize = mWebCamProfile.ResampledTextureSize;

            mVideoModeData = new CameraDevice.VideoModeData
            {
                width     = resampledSize.x,
                height    = resampledSize.y,
                frameRate = mWebCamProfile.RequestedFPS                          // real fps not known, but it is not used in Unity anyway
            };

            mVideoTextureInfo = new QCARRenderer.VideoTextureInfo
            {
                imageSize   = resampledSize,
                textureSize = resampledSize
            };

            mTextureRenderer   = new TextureRenderer(mWebCamTexture.Texture, mRenderTextureLayer, resampledSize);
            mBufferReadTexture = new Texture2D(resampledSize.x, resampledSize.y);
            mReadPixelsRect    = new Rect(0, 0, resampledSize.x, resampledSize.y);

            IsTextureSizeAvailable = true;
        }

        // enable or disable the BGRenderingBehaviour depending on whether background rendering is enabled:
        mBgRenderingTexBehaviour.CheckAndSetActive(QCARRenderer.Instance.DrawVideoBackground);
    }
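
The guard on DidUpdateThisFrame matters because Unity's WebCamTexture reports placeholder dimensions (typically 16x16) until the first real frame has been captured. A minimal sketch of the same pattern against Unity's API directly (the class and field names here are illustrative, not from the source):

    using UnityEngine;

    public class WebCamSizeProbe : MonoBehaviour
    {
        private WebCamTexture mWebCamTexture;
        private bool mSizeKnown;

        private void Start()
        {
            mWebCamTexture = new WebCamTexture();
            mWebCamTexture.Play();
            // width/height still hold placeholder values at this point
        }

        private void Update()
        {
            if (!mSizeKnown && mWebCamTexture.didUpdateThisFrame)
            {
                // only now do width/height reflect the actual capture resolution
                Debug.Log("Camera size: " + mWebCamTexture.width + "x" + mWebCamTexture.height);
                mSizeKnown = true;
            }
        }
    }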
Example #2
 public WebCamTexAdaptorImpl(string deviceName, int requestedFPS, QCARRenderer.Vec2I requestedTextureSize)
 {
     this.mWebCamTexture = new WebCamTexture();   // assumed: instantiation sits outside this snippet but is required before the setters below
     this.mWebCamTexture.deviceName      = deviceName;
     this.mWebCamTexture.requestedFPS    = requestedFPS;
     this.mWebCamTexture.requestedWidth  = requestedTextureSize.x;
     this.mWebCamTexture.requestedHeight = requestedTextureSize.y;
 }
 internal void Update()
 {
     if (QCARRuntimeUtilities.IsPlayMode())
     {
         if (!this.IsTextureSizeAvailable && this.mWebCamTexture.DidUpdateThisFrame)
         {
             QCARRenderer.Vec2I         resampledTextureSize = this.mWebCamProfile.ResampledTextureSize;
             CameraDevice.VideoModeData data = new CameraDevice.VideoModeData {
                 width     = resampledTextureSize.x,
                 height    = resampledTextureSize.y,
                 frameRate = this.mWebCamProfile.RequestedFPS
             };
             this.mVideoModeData = data;
             QCARRenderer.VideoTextureInfo info = new QCARRenderer.VideoTextureInfo {
                 imageSize   = resampledTextureSize,
                 textureSize = resampledTextureSize
             };
             this.mVideoTextureInfo      = info;
             this.mTextureRenderer       = new TextureRenderer(this.mWebCamTexture.Texture, this.mRenderTextureLayer, resampledTextureSize);
             this.mBufferReadTexture     = new Texture2D(resampledTextureSize.x, resampledTextureSize.y);
             this.mReadPixelsRect        = new Rect(0f, 0f, (float)resampledTextureSize.x, (float)resampledTextureSize.y);
             this.IsTextureSizeAvailable = true;
         }
         this.mBgRenderingTexBehaviour.CheckAndSetActive(QCARRenderer.Instance.DrawVideoBackground);
     }
 }
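
For comparison, Unity's WebCamTexture has a constructor overload that accepts the same requested values directly; a hypothetical one-line equivalent of the four setters above (device name and sizes made up for illustration):

    // equivalent to setting deviceName, requestedWidth/Height and requestedFPS individually:
    WebCamTexture tex = new WebCamTexture("USB Camera", 640, 480, 30);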
Example #4
    public override bool SetRegionOfInterest(Rect detectionRegion, Rect trackingRegion)
    {
        Vector2 vector;
        Vector2 vector2;
        Vector2 vector3;
        Vector2 vector4;
        QCARAbstractBehaviour behaviour = (QCARAbstractBehaviour)UnityEngine.Object.FindObjectOfType(typeof(QCARAbstractBehaviour));

        if (behaviour == null)
        {
            Debug.LogError("QCAR Behaviour could not be found");
            return false;
        }
        Rect viewportRectangle       = behaviour.GetViewportRectangle();
        bool videoBackGroundMirrored = behaviour.VideoBackGroundMirrored;

        CameraDevice.VideoModeData videoMode = CameraDevice.Instance.GetVideoMode(behaviour.CameraDeviceMode);
        QCARRuntimeUtilities.SelectRectTopLeftAndBottomRightForLandscapeLeft(detectionRegion, videoBackGroundMirrored, out vector, out vector2);
        QCARRuntimeUtilities.SelectRectTopLeftAndBottomRightForLandscapeLeft(trackingRegion, videoBackGroundMirrored, out vector3, out vector4);
        QCARRenderer.Vec2I veci  = QCARRuntimeUtilities.ScreenSpaceToCameraFrameCoordinates(vector, viewportRectangle, videoBackGroundMirrored, videoMode);
        QCARRenderer.Vec2I veci2 = QCARRuntimeUtilities.ScreenSpaceToCameraFrameCoordinates(vector2, viewportRectangle, videoBackGroundMirrored, videoMode);
        QCARRenderer.Vec2I veci3 = QCARRuntimeUtilities.ScreenSpaceToCameraFrameCoordinates(vector3, viewportRectangle, videoBackGroundMirrored, videoMode);
        QCARRenderer.Vec2I veci4 = QCARRuntimeUtilities.ScreenSpaceToCameraFrameCoordinates(vector4, viewportRectangle, videoBackGroundMirrored, videoMode);
        if (QCARWrapper.Instance.TextTrackerSetRegionOfInterest(veci.x, veci.y, veci2.x, veci2.y, veci3.x, veci3.y, veci4.x, veci4.y, (int)this.CurrentUpDirection) == 0)
        {
            Debug.LogError(string.Format("Could not set region of interest: ({0}, {1}, {2}, {3}) - ({4}, {5}, {6}, {7})", new object[] { detectionRegion.x, detectionRegion.y, detectionRegion.width, detectionRegion.height, trackingRegion.x, trackingRegion.y, trackingRegion.width, trackingRegion.height }));
            return false;
        }
        return true;
    }
 public NullWebCamTexAdaptor(int requestedFPS, QCARRenderer.Vec2I requestedTextureSize)
 {
     this.mTexture         = new Texture2D(requestedTextureSize.x, requestedTextureSize.y);
     this.mMsBetweenFrames = 1000.0 / ((double)requestedFPS);
     this.mLastFrame       = DateTime.Now - TimeSpan.FromDays(1.0);
     if (QCARRuntimeUtilities.IsQCAREnabled())
     {
         PlayModeEditorUtility.Instance.DisplayDialog("Error occurred!", "No camera connected!\nTo run your application using Play Mode, please connect a webcam to your computer.", "Ok");
         Debug.LogError("No camera connected!\nTo run your application using Play Mode, please connect a webcam to your computer.");
     }
 }
    public NullWebCamTexAdaptor(int requestedFPS, QCARRenderer.Vec2I requestedTextureSize)
    {
        mTexture         = new Texture2D(requestedTextureSize.x, requestedTextureSize.y);
        mMsBetweenFrames = 1000d / requestedFPS;
        // initialize last frame way back
        mLastFrame = DateTime.Now - TimeSpan.FromDays(1);

        if (QCARRuntimeUtilities.IsQCAREnabled())
        {
#if UNITY_EDITOR
            EditorUtility.DisplayDialog("Error occurred!", ERROR_MSG, "Ok");
#endif
            Debug.LogError(ERROR_MSG);
        }
    }
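
The ERROR_MSG constant sits outside this snippet; judging from the decompiled constructor above, it presumably reads:

    // assumed from the decompiled version of this constructor:
    private const string ERROR_MSG = "No camera connected!\n" +
        "To run your application using Play Mode, please connect a webcam to your computer.";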
 public TextureRenderer(Texture textureToRender, int renderTextureLayer, QCARRenderer.Vec2I requestedTextureSize)
 {
     if (renderTextureLayer > 31)
     {
         Debug.LogError("WebCamBehaviour.SetupTextureBufferCamera: configured layer > 31 is not supported by Unity!");
     }
     else
     {
         this.mTextureWidth  = requestedTextureSize.x;
         this.mTextureHeight = requestedTextureSize.y;
         float      y      = (((float)this.mTextureHeight) / ((float)this.mTextureWidth)) * 0.5f;
         GameObject target = new GameObject("TextureBufferCamera");
         this.mTextureBufferCamera = target.AddComponent<Camera>();
         this.mTextureBufferCamera.isOrthoGraphic   = true;
         this.mTextureBufferCamera.orthographicSize = y;
         this.mTextureBufferCamera.aspect           = ((float)this.mTextureWidth) / ((float)this.mTextureHeight);
         this.mTextureBufferCamera.nearClipPlane    = 0.5f;
         this.mTextureBufferCamera.farClipPlane     = 1.5f;
         this.mTextureBufferCamera.cullingMask      = 1 << renderTextureLayer;
         this.mTextureBufferCamera.enabled          = false;
         if ((KeepAliveAbstractBehaviour.Instance != null) && KeepAliveAbstractBehaviour.Instance.KeepARCameraAlive)
         {
             UnityEngine.Object.DontDestroyOnLoad(target);
         }
         GameObject obj3 = new GameObject("TextureBufferMesh", new System.Type[] { typeof(MeshFilter), typeof(MeshRenderer) })
         {
             transform = { parent = target.transform },
             layer     = renderTextureLayer
         };
         Mesh mesh = new Mesh();
         mesh.vertices  = new Vector3[] { new Vector3(-0.5f, y, 1f), new Vector3(0.5f, y, 1f), new Vector3(-0.5f, -y, 1f), new Vector3(0.5f, -y, 1f) };
         mesh.uv        = new Vector2[] { new Vector2(0f, 0f), new Vector2(1f, 0f), new Vector2(0f, 1f), new Vector2(1f, 1f) };
         mesh.triangles = new int[] { 0, 1, 2, 2, 1, 3 };
         MeshRenderer component = obj3.GetComponent<MeshRenderer>();
         component.material                    = new Material(Shader.Find("Unlit/Texture"));
         component.material.mainTexture        = textureToRender;
         obj3.GetComponent<MeshFilter>().mesh = mesh;
     }
 }
    /// <summary>
    /// Calculates a position in camera frame coordinates based on the current orientation and background config for a given screen-space position
    /// </summary>
    public static QCARRenderer.Vec2I ScreenSpaceToCameraFrameCoordinates(Vector2 screenSpaceCoordinate, Rect bgTextureViewPortRect, bool isTextureMirrored, CameraDevice.VideoModeData videoModeData)
    {
        float viewportOrigX = bgTextureViewPortRect.xMin;
        float viewportOrigY = bgTextureViewPortRect.yMin;
        float viewportSizeX = bgTextureViewPortRect.width;
        float viewportSizeY = bgTextureViewPortRect.height;

        bool isPortrait = false;

        float textureSizeX = videoModeData.width;
        float textureSizeY = videoModeData.height;

        float prefixX = 0.0f;
        float prefixY = 0.0f;

        float inversionMultiplierX = 0.0f;
        float inversionMultiplierY = 0.0f;

        PrepareCoordinateConversion(isTextureMirrored, ref prefixX, ref prefixY, ref inversionMultiplierX, ref inversionMultiplierY, ref isPortrait);

        // normalize the coordinates within viewport between 0 and 1
        float normalizedCoordX = (screenSpaceCoordinate.x - viewportOrigX) / viewportSizeX;
        float normalizedCoordY = (screenSpaceCoordinate.y - viewportOrigY) / viewportSizeY;

        QCARRenderer.Vec2I result;

        // convert from screen coordinates to texture coordinates
        if (isPortrait)
        {
            result = new QCARRenderer.Vec2I(Mathf.RoundToInt((prefixX + (inversionMultiplierX * normalizedCoordY)) * textureSizeX),
                                            Mathf.RoundToInt((prefixY + (inversionMultiplierY * normalizedCoordX)) * textureSizeY));
        }
        else
        {
            result = new QCARRenderer.Vec2I(Mathf.RoundToInt((prefixX + (inversionMultiplierX * normalizedCoordX)) * textureSizeX),
                                            Mathf.RoundToInt((prefixY + (inversionMultiplierY * normalizedCoordY)) * textureSizeY));
        }

        return result;
    }
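
A worked example makes the conversion concrete. PrepareCoordinateConversion is not shown, so the prefix/multiplier values below are assumed for the plain non-mirrored landscape case:

    // assumed: prefixX = prefixY = 0, inversionMultiplierX = inversionMultiplierY = 1, isPortrait = false
    // viewport: origin (0, 0), size 1280x720; camera frame (videoModeData): 640x480
    // screen-space input (320, 180):
    //   normalizedCoordX = (320 - 0) / 1280 = 0.25
    //   normalizedCoordY = (180 - 0) / 720  = 0.25
    //   result = (RoundToInt(0.25 * 640), RoundToInt(0.25 * 480)) = (160, 120)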
Example #9
    /// <summary>
    /// Defines the areas of the image in screen coordinates where text can be detected and tracked.
    /// </summary>
    public override bool SetRegionOfInterest(Rect detectionRegion, Rect trackingRegion)
    {
        QCARBehaviour qcarbehaviour = (QCARBehaviour)Object.FindObjectOfType(typeof(QCARBehaviour));

        if (qcarbehaviour == null)
        {
            Debug.LogError("QCAR Behaviour could not be found");
            return false;
        }

        // required information to transform screen space coordinates into camera frame coordinates:
        Rect bgTextureViewPortRect = qcarbehaviour.GetViewportRectangle();
        bool isMirrored            = qcarbehaviour.VideoBackGroundMirrored;

        CameraDevice.VideoModeData videoModeData = CameraDevice.Instance.GetVideoMode(qcarbehaviour.CameraDeviceMode);

        // depending on the current orientation, different corner points of the rect have to be taken
        // - they need to span a rectangle in the camera frame coordinate system
        Vector2 detectionLeftTop, detectionRightBottom, trackingLeftTop, trackingRightBottom;

        QCARRuntimeUtilities.SelectRectTopLeftAndBottomRightForLandscapeLeft(detectionRegion, isMirrored, out detectionLeftTop, out detectionRightBottom);
        QCARRuntimeUtilities.SelectRectTopLeftAndBottomRightForLandscapeLeft(trackingRegion, isMirrored, out trackingLeftTop, out trackingRightBottom);

        // transform the coordinates into camera frame coord system
        QCARRenderer.Vec2I camFrameDetectionLeftTop     = QCARRuntimeUtilities.ScreenSpaceToCameraFrameCoordinates(detectionLeftTop, bgTextureViewPortRect, isMirrored, videoModeData);
        QCARRenderer.Vec2I camFrameDetectionRightBottom = QCARRuntimeUtilities.ScreenSpaceToCameraFrameCoordinates(detectionRightBottom, bgTextureViewPortRect, isMirrored, videoModeData);
        QCARRenderer.Vec2I camFrameTrackingLeftTop      = QCARRuntimeUtilities.ScreenSpaceToCameraFrameCoordinates(trackingLeftTop, bgTextureViewPortRect, isMirrored, videoModeData);
        QCARRenderer.Vec2I camFrameTrackingRightBottom  = QCARRuntimeUtilities.ScreenSpaceToCameraFrameCoordinates(trackingRightBottom, bgTextureViewPortRect, isMirrored, videoModeData);

        if (QCARWrapper.Instance.TextTrackerSetRegionOfInterest(camFrameDetectionLeftTop.x, camFrameDetectionLeftTop.y, camFrameDetectionRightBottom.x, camFrameDetectionRightBottom.y,
                                                                camFrameTrackingLeftTop.x, camFrameTrackingLeftTop.y, camFrameTrackingRightBottom.x, camFrameTrackingRightBottom.y, (int)CurrentUpDirection) == 0)
        {
            Debug.LogError(string.Format("Could not set region of interest: ({0}, {1}, {2}, {3}) - ({4}, {5}, {6}, {7})",
                                         detectionRegion.x, detectionRegion.y, detectionRegion.width, detectionRegion.height,
                                         trackingRegion.x, trackingRegion.y, trackingRegion.width, trackingRegion.height));
            return false;
        }

        return(true);
    }
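
A hypothetical call site for this method (the textTracker reference and region values are illustrative, not from the source):

    // detect text in the middle band of the screen, track it anywhere on screen:
    Rect detectionRegion = new Rect(0f, Screen.height * 0.25f, Screen.width, Screen.height * 0.5f);
    Rect trackingRegion  = new Rect(0f, 0f, Screen.width, Screen.height);
    if (!textTracker.SetRegionOfInterest(detectionRegion, trackingRegion))
    {
        Debug.LogWarning("Could not apply the text tracker regions of interest.");
    }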
    /// <summary>
    /// Calculates a position in camera frame coordinates based on the current orientation and background config for a given screen-space position
    /// </summary>
    public static QCARRenderer.Vec2I ScreenSpaceToCameraFrameCoordinates(Vector2 screenSpaceCoordinate, Rect bgTextureViewPortRect, bool isTextureMirrored, CameraDevice.VideoModeData videoModeData)
    {
        float viewportOrigX = bgTextureViewPortRect.xMin;
        float viewportOrigY = bgTextureViewPortRect.yMin;
        float viewportSizeX = bgTextureViewPortRect.width;
        float viewportSizeY = bgTextureViewPortRect.height;

        bool isPortrait = false;

        float textureSizeX = videoModeData.width;
        float textureSizeY = videoModeData.height;

        float prefixX = 0.0f;
        float prefixY = 0.0f;

        float inversionMultiplierX = 0.0f;
        float inversionMultiplierY = 0.0f;

        PrepareCoordinateConversion(isTextureMirrored, ref prefixX, ref prefixY, ref inversionMultiplierX, ref inversionMultiplierY, ref isPortrait);

        // normalize the coordinates within viewport between 0 and 1
        float normalizedCoordX = (screenSpaceCoordinate.x - viewportOrigX) / viewportSizeX;
        float normalizedCoordY = (screenSpaceCoordinate.y - viewportOrigY) / viewportSizeY;

        QCARRenderer.Vec2I result;

        // convert from screen coordinates to texture coordinates
        if (isPortrait)
        {
            result = new QCARRenderer.Vec2I(Mathf.RoundToInt((prefixX + (inversionMultiplierX * normalizedCoordY)) * textureSizeX),
                                            Mathf.RoundToInt((prefixY + (inversionMultiplierY * normalizedCoordX)) * textureSizeY));
        }
        else
        {
            result = new QCARRenderer.Vec2I(Mathf.RoundToInt((prefixX + (inversionMultiplierX * normalizedCoordX)) * textureSizeX),
                                            Mathf.RoundToInt((prefixY + (inversionMultiplierY * normalizedCoordY)) * textureSizeY));
        }

        return result;
    }
Example #11
    // sets up all GameObjects needed to render frames, including a mesh with the correct material
    public TextureRenderer(Texture textureToRender, int renderTextureLayer, QCARRenderer.Vec2I requestedTextureSize)
    {
        if (renderTextureLayer > 31)
        {
            Debug.LogError("WebCamBehaviour.SetupTextureBufferCamera: configured layer > 31 is not supported by Unity!");
            return;
        }

        mTextureWidth  = requestedTextureSize.x;
        mTextureHeight = requestedTextureSize.y;

        float halfMeshHeight = (mTextureHeight / (float)mTextureWidth) * 0.5f;

        // camera object:
        GameObject texBufferGameObj = new GameObject("TextureBufferCamera");

        mTextureBufferCamera = texBufferGameObj.AddComponent<Camera>();
        mTextureBufferCamera.isOrthoGraphic   = true;
        mTextureBufferCamera.orthographicSize = halfMeshHeight;
        mTextureBufferCamera.aspect           = mTextureWidth / (float)mTextureHeight;
        mTextureBufferCamera.nearClipPlane    = 0.5f;
        mTextureBufferCamera.farClipPlane     = 1.5f;
        mTextureBufferCamera.cullingMask      = (1 << renderTextureLayer);
        mTextureBufferCamera.enabled          = false; // camera will only render on demand!!
        // if the ARCamera should be used across multiple scenes, make sure the texture buffer object does not get destroyed:
        if (KeepAliveBehaviour.Instance != null && KeepAliveBehaviour.Instance.KeepARCameraAlive)
        {
            Object.DontDestroyOnLoad(texBufferGameObj);
        }

        // mesh to display the given texture
        GameObject textureBufferMesh = new GameObject("TextureBufferMesh", new[] { typeof(MeshFilter), typeof(MeshRenderer) });

        textureBufferMesh.transform.parent = texBufferGameObj.transform;
        textureBufferMesh.layer            = renderTextureLayer;

        Mesh mesh = new Mesh
        {
            vertices = new[]
            {
                new Vector3(-0.5f, halfMeshHeight, 1f),
                new Vector3(0.5f, halfMeshHeight, 1f),
                new Vector3(-0.5f, -halfMeshHeight, 1f),
                new Vector3(0.5f, -halfMeshHeight, 1f),
            },
            uv = new[]
            {
                new Vector2(0f, 0f),
                new Vector2(1f, 0f),
                new Vector2(0f, 1f),
                new Vector2(1f, 1f),
            },
            triangles = new[]
            {
                0, 1, 2,
                2, 1, 3
            }
        };

        // renderer and material
        MeshRenderer meshRenderer = textureBufferMesh.GetComponent<MeshRenderer>();

        meshRenderer.material             = new Material(Shader.Find("Unlit/Texture"));
        meshRenderer.material.mainTexture = textureToRender;
        MeshFilter meshFilter = textureBufferMesh.GetComponent<MeshFilter>();

        meshFilter.mesh = mesh;
    }
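
A quick sanity check on the quad geometry (derived from the values above, not stated in the source):

    // the quad is 1 unit wide and 2 * halfMeshHeight = mTextureHeight / mTextureWidth units tall;
    // with orthographicSize = halfMeshHeight the camera view is exactly that tall, and its
    // width is 2 * orthographicSize * aspect = 1 unit -- so the textured quad fills the frame.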