public override bool SelectVideoMode(CameraDevice.CameraDeviceMode mode)
{
    // Ask the native QCAR layer to switch to the requested video mode;
    // a zero return value from the wrapper indicates failure.
    return QCARWrapper.Instance.CameraDeviceSelectVideoMode((int)mode) != 0;
}
 public override bool SelectVideoMode(CameraDevice.CameraDeviceMode mode)
 {
     // Ask the native Vuforia layer to switch to the requested video mode.
     if (VuforiaWrapper.Instance.CameraDeviceSelectVideoMode((int)mode) == 0)
     {
         return false;
     }
     // Remember the selected mode and mark the cached video mode data as stale.
     this.mCameraDeviceMode = mode;
     this.mHasCameraDeviceModeBeenSet = true;
     this.mVideoModeDataNeedsUpdate = true;
     return true;
 }
    public override CameraDevice.VideoModeData GetVideoMode(CameraDevice.CameraDeviceMode mode)
    {
        // In Play Mode the web cam emulation provides the video mode directly.
        if (QCARRuntimeUtilities.IsPlayMode())
        {
            return this.WebCam.GetVideoMode();
        }
        // Marshal the struct through unmanaged memory when querying the native plugin.
        IntPtr videoModePtr = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(CameraDevice.VideoModeData)));
        QCARWrapper.Instance.CameraDeviceGetVideoMode((int)mode, videoModePtr);
        CameraDevice.VideoModeData data = (CameraDevice.VideoModeData)Marshal.PtrToStructure(videoModePtr, typeof(CameraDevice.VideoModeData));
        Marshal.FreeHGlobal(videoModePtr);
        return data;
    }
        public override CameraDevice.VideoModeData GetVideoMode(CameraDevice.CameraDeviceMode mode)
        {
            // In Play Mode the web cam emulation provides the video mode directly.
            if (VuforiaRuntimeUtilities.IsPlayMode())
            {
                return this.WebCam.GetVideoMode();
            }
            // Marshal the struct through unmanaged memory when querying the native plugin.
            IntPtr videoModePtr = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(CameraDevice.VideoModeData)));
            VuforiaWrapper.Instance.CameraDeviceGetVideoMode((int)mode, videoModePtr);
            CameraDevice.VideoModeData data = (CameraDevice.VideoModeData)Marshal.PtrToStructure(videoModePtr, typeof(CameraDevice.VideoModeData));
            Marshal.FreeHGlobal(videoModePtr);
            return data;
        }
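A caller-side sketch, not part of the SDK source, of how the VideoModeData returned by GetVideoMode might be consumed; it assumes the camera device has already been initialized and started, and the width/height field names are taken from the Vuforia VideoModeData struct:

 static float GetVideoAspectRatio(CameraDevice.CameraDeviceMode mode)
 {
     // Query the resolution the camera reports for the given mode and derive its aspect ratio.
     CameraDevice.VideoModeData videoMode = CameraDevice.Instance.GetVideoMode(mode);
     return (float)videoMode.width / (float)videoMode.height;
 }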
Example #5
 // Variant of the per-frame update that reports success through its return value.
 internal bool Update(ScreenOrientation counterRotation, CameraDevice.CameraDeviceMode deviceMode, ref CameraDevice.VideoModeData videoMode)
 {
     if (this.mPaused)
     {
         // Re-submit the last frame to native code without advancing the tracking state.
         QCARWrapper.Instance.PausedUpdateQCAR();
         return false;
     }
     if (!QCARRuntimeUtilities.IsQCAREnabled())
     {
         // "Fake tracking" path for editor setups without native plugin support.
         this.UpdateTrackablesEditor();
         return true;
     }
     // Prepare the camera image container.
     this.UpdateImageContainer();
     if (QCARRuntimeUtilities.IsPlayMode())
     {
         // In Play Mode, feed the latest web cam frame into the native layer.
         CameraDeviceImpl cameraDeviceImpl = (CameraDeviceImpl)CameraDevice.Instance;
         if (cameraDeviceImpl.WebCam.DidUpdateThisFrame)
         {
             this.InjectCameraFrame();
         }
     }
     // Update the native frame state; a zero return value indicates failure.
     if (QCARWrapper.Instance.UpdateQCAR(this.mImageHeaderData, this.mNumImageHeaders, this.mLastProcessedFrameStatePtr, (int)counterRotation, (int)deviceMode) == 0)
     {
         return false;
     }
     this.mFrameState = (FrameState)Marshal.PtrToStructure(this.mLastProcessedFrameStatePtr, typeof(FrameState));
     // Retrieve the video mode data for this frame.
     if (QCARRuntimeUtilities.IsPlayMode())
     {
         videoMode = CameraDevice.Instance.GetVideoMode(deviceMode);
     }
     else
     {
         IntPtr videoModeData = this.mFrameState.videoModeData;
         videoMode = (CameraDevice.VideoModeData)Marshal.PtrToStructure(videoModeData, typeof(CameraDevice.VideoModeData));
     }
     // Reinitialize the trackable data container if required.
     this.InitializeTrackableContainer(this.mFrameState.numTrackableResults);
     // Handle the camera image data and the trackable results.
     this.UpdateCameraFrame();
     this.UpdateTrackers(this.mFrameState);
     if (QCARRuntimeUtilities.IsPlayMode())
     {
         // Pass the index of the last processed frame back to the web cam emulation.
         CameraDeviceImpl cameraDeviceImpl2 = (CameraDeviceImpl)CameraDevice.Instance;
         cameraDeviceImpl2.WebCam.SetFrameIndex(this.mFrameState.frameIndex);
     }
     if (!this.mDrawVideobackground)
     {
         // Native video background rendering is disabled, so draw into the offscreen texture here.
         this.RenderVideoBackgroundOrDrawIntoTextureInNative();
     }
     return true;
 }
 // Copy the global Vuforia configuration into this behaviour's settings on Awake.
 protected override void Awake()
 {
     VuforiaAbstractConfiguration.GenericVuforiaConfiguration vuforia = VuforiaAbstractConfiguration.Instance.Vuforia;
     this.CameraDeviceModeSetting        = vuforia.CameraDeviceMode;
     this.MaxSimultaneousImageTargets    = vuforia.MaxSimultaneousImageTargets;
     this.MaxSimultaneousObjectTargets   = vuforia.MaxSimultaneousObjectTargets;
     this.UseDelayedLoadingObjectTargets = vuforia.UseDelayedLoadingObjectTargets;
     this.CameraDirection       = vuforia.CameraDirection;
     this.MirrorVideoBackground = vuforia.MirrorVideoBackground;
     this.mWorldCenterMode      = base.VuforiaBehaviour.WorldCenterMode;
     this.mWorldCenter          = base.VuforiaBehaviour.WorldCenter;
     this.mEyewearBehaviour     = DigitalEyewearARController.Instance;
     if (this.mEyewearBehaviour == null)
     {
         Debug.LogError("Failed to get an instance of DigitalEyewearBehaviour");
     }
     this.mVideoBackgroundMgr = VideoBackgroundManager.Instance;
     this.mVideoBackgroundMgr.Initialize();
 }
        private bool StopVuforia()
        {
            this.mHasStarted = false;
            Debug.Log("StopVuforia");
            // Stop the object tracker before shutting down the camera.
            ObjectTracker tracker = TrackerManager.Instance.GetTracker<ObjectTracker>();
            if (tracker != null)
            {
                tracker.Stop();
            }
            // Remember the current camera settings so they can be restored on restart.
            CameraDevice.Instance.GetSelectedCameraDirection(out this.CameraDirection);
            this.CameraDeviceModeSetting = this.CameraDeviceMode;
            if (!CameraDevice.Instance.Stop())
            {
                return false;
            }
            if (!CameraDevice.Instance.Deinit())
            {
                return false;
            }
            // Release the rendering resources tied to the video background.
            VuforiaRenderer.Instance.ClearVideoBackgroundConfig();
            Device.Instance.DeleteRenderingPrimitives();
            return true;
        }
    // Process the camera image and tracking data for this frame
    public void Update(ScreenOrientation counterRotation, CameraDevice.CameraDeviceMode deviceMode, ref CameraDevice.VideoModeData videoMode)
    {
        if (mPaused)
        {
            // set the last frame again, do not update the state
            QCARWrapper.Instance.PausedUpdateQCAR();
        }
        else
        {
            // enable "fake tracking" if running in the free editor version
            // that does not support native plugins
            if (!QCARRuntimeUtilities.IsQCAREnabled())
            {
                UpdateTrackablesEditor();
                return;
            }

            // Prepare the camera image container
            UpdateImageContainer();

            if (QCARRuntimeUtilities.IsPlayMode())
            {
                CameraDeviceImpl cameraDeviceImpl = (CameraDeviceImpl)CameraDevice.Instance;
                if (cameraDeviceImpl.WebCam.DidUpdateThisFrame)
                {
                    InjectCameraFrame();
                }
            }

            // Draw the video background or update the video texture
            // Also retrieve registered camera images for this frame
            QCARWrapper.Instance.UpdateQCAR(mImageHeaderData, mNumImageHeaders,
                                            mLastProcessedFrameStatePtr, (int)counterRotation,
                                            (int)deviceMode);

            mFrameState = (FrameState)Marshal.PtrToStructure(mLastProcessedFrameStatePtr, typeof(FrameState));
        }


        // Get video mode data
        if (QCARRuntimeUtilities.IsPlayMode())
        {
            videoMode = CameraDevice.Instance.GetVideoMode(deviceMode);
        }
        else
        {
            var videoModePtr = mFrameState.videoModeData;
            videoMode =
                (CameraDevice.VideoModeData)
                Marshal.PtrToStructure(videoModePtr, typeof(CameraDevice.VideoModeData));
        }


        // Reinitialize the trackable data container if required:
        InitializeTrackableContainer(mFrameState.numTrackableResults);

        // Handle the camera image data
        UpdateCameraFrame();

        // Handle the trackable data
        UpdateTrackers(mFrameState);

        if (QCARRuntimeUtilities.IsPlayMode())
        {
            // read out the index of the last processed frame
            CameraDeviceImpl cameraDeviceImpl = (CameraDeviceImpl)CameraDevice.Instance;
            cameraDeviceImpl.WebCam.SetFrameIndex(mFrameState.frameIndex);
        }

        // if native video background rendering is disabled, we need to call the method to draw into the
        // offscreen texture here in the update loop.
        // rendering the video background into the texture in the PrepareRendering callback will result
        // in the texture updated in the next frame, which results in a lag between augmentations and the
        // video background
        if (!mDrawVideobackground)
        {
            RenderVideoBackgroundOrDrawIntoTextureInNative();
        }
    }
 public override bool GetSelectedVideoMode(out CameraDevice.CameraDeviceMode mode)
 {
     // Report the last mode passed to SelectVideoMode; returns false if no mode has been set yet.
     mode = this.mCameraDeviceMode;
     return this.mHasCameraDeviceModeBeenSet;
 }
 private void SetViewerActive(bool isActive, bool deinitCam, bool initCam, CameraDevice.CameraDirection camDirection, CameraDevice.CameraDeviceMode mode)
 {
     if (deinitCam)
     {
         // Shut the camera down before changing the viewer state.
         CameraDevice.Instance.Stop();
         CameraDevice.Instance.Deinit();
     }
     Device.Instance.SetViewerActive(isActive);
     if (initCam)
     {
         // Re-initialize the camera with the requested direction and video mode.
         CameraDevice.Instance.Init(camDirection);
         CameraDevice.Instance.SelectVideoMode(mode);
         CameraDevice.Instance.Start();
     }
     this.ConfigureView();
 }
Example #11
 public void InitCameraDevice(CameraDevice.CameraDeviceMode cameraDeviceMode, VuforiaRenderer.VideoBackgroundReflection mirrorVideoBackground, Action onVideoBackgroundConfigChanged)
 {
     // No-op: this implementation ignores the camera configuration.
 }
Example #12
 public abstract bool GetSelectedVideoMode(out CameraDevice.CameraDeviceMode mode);
Example #13
 public abstract bool SelectVideoMode(CameraDevice.CameraDeviceMode mode);
Example #14
 public abstract CameraDevice.VideoModeData GetVideoMode(CameraDevice.CameraDeviceMode mode);
 public void InitCameraDevice(CameraDevice.CameraDeviceMode cameraDeviceMode, VuforiaRenderer.VideoBackgroundReflection mirrorVideoBackground, Action onVideoBackgroundConfigChanged)
 {
     // Cache the requested camera configuration and the config-changed callback for later use.
     this.mCameraDeviceMode = cameraDeviceMode;
     this.mInitialReflection = mirrorVideoBackground;
     this.mOnVideoBackgroundConfigChanged = onVideoBackgroundConfigChanged;
 }
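Putting the examples above together, a hedged sketch of switching the camera to a different video mode at runtime; it mirrors the Stop/Deinit sequence from StopVuforia and the Init/SelectVideoMode/Start sequence from SetViewerActive, and the helper name ApplyCameraMode is illustrative:

 static bool ApplyCameraMode(CameraDevice.CameraDeviceMode newMode)
 {
     // Shut the camera down before re-initializing it with the new mode.
     CameraDevice.Instance.Stop();
     CameraDevice.Instance.Deinit();

     // Bring the camera back up, select the requested video mode, and restart capture.
     if (!CameraDevice.Instance.Init(CameraDevice.CameraDirection.CAMERA_DEFAULT))
     {
         return false;
     }
     if (!CameraDevice.Instance.SelectVideoMode(newMode))
     {
         return false;
     }
     return CameraDevice.Instance.Start();
 }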