// Per-frame driver: pumps the Android player glue, reconfigures the video
// background and projection when the surface changed, runs the core QCAR
// update and notifies registered trackable event handlers.
private void Update()
{
    if (QCARManager.Instance.Initialized)
    {
        // Forward Unity's update to the Android player integration (no-op elsewhere).
        if (this.mAndroidUnityPlayer != null)
        {
            this.mAndroidUnityPlayer.Update();
        }
        // Current orientation of the rendering surface as reported by native code.
        ScreenOrientation surfaceOrientation = (ScreenOrientation)QCARWrapper.Instance.GetSurfaceOrientation();
        // Reconfigure the background and projection when the renderer is dirty
        // or the surface orientation changed since the last projection update.
        CameraDeviceImpl instance = (CameraDeviceImpl)CameraDevice.Instance;
        if (instance.CameraReady && (QCARUnity.IsRendererDirty() || (this.mProjectionOrientation != surfaceOrientation)))
        {
            this.ConfigureVideoBackground(false);
            this.UpdateProjection(surfaceOrientation);
            instance.ResetDirtyFlag();
        }
        // Bind a simple material to reset the GL state before native rendering.
        this.mClearMaterial.SetPass(0);
        // Run the core QCAR update; on success refresh the camera clear flags
        // and tell every handler that all trackables have been updated.
        if (((QCARManagerImpl)QCARManager.Instance).Update(this.mProjectionOrientation, this.CameraDeviceMode, ref this.mVideoMode))
        {
            this.UpdateCameraClearFlags();
            foreach (ITrackerEventHandler handler in this.mTrackerEventHandlers)
            {
                handler.OnTrackablesUpdated();
            }
        }
    }
    else if (QCARRuntimeUtilities.IsPlayMode())
    {
        // Unity occasionally recompiles scripts shortly after entering Play
        // mode, invalidating native state; re-create the wrapper and restart
        // Play mode to recover.
        Debug.LogWarning("Scripts have been recompiled during Play mode, need to restart!");
        QCARWrapper.Create();
        PlayModeEditorUtility.Instance.RestartPlayMode();
    }
}
// Ensures the unmanaged image-header array matches the number of registered
// camera images, then marshals each managed image's metadata into it so
// native code can fill the headers during the next frame update.
private void UpdateImageContainer()
{
    CameraDeviceImpl instance = (CameraDeviceImpl)CameraDevice.Instance;
    int imageCount = instance.GetAllImages().Count;
    // (Re)allocate the unmanaged header array when the number of requested
    // images changed, or when images exist but no buffer was ever allocated.
    if ((this.mNumImageHeaders != imageCount) || ((imageCount > 0) && (this.mImageHeaderData == IntPtr.Zero)))
    {
        this.mNumImageHeaders = imageCount;
        // FreeHGlobal(IntPtr.Zero) is a documented no-op, so this is safe on first use.
        Marshal.FreeHGlobal(this.mImageHeaderData);
        this.mImageHeaderData = Marshal.AllocHGlobal((int)(Marshal.SizeOf(typeof(ImageHeaderData)) * this.mNumImageHeaders));
    }
    // Copy each managed image's metadata into its unmanaged header slot.
    int num = 0;
    int headerSize = Marshal.SizeOf(typeof(ImageHeaderData));
    foreach (ImageImpl impl2 in instance.GetAllImages().Values)
    {
        // 64-bit-safe pointer arithmetic: ToInt32() throws OverflowException
        // in a 64-bit process when the allocation lives above 2 GB.
        IntPtr ptr = new IntPtr(this.mImageHeaderData.ToInt64() + ((long)num * headerSize));
        ImageHeaderData structure = new ImageHeaderData
        {
            width = impl2.Width,
            height = impl2.Height,
            stride = impl2.Stride,
            bufferWidth = impl2.BufferWidth,
            bufferHeight = impl2.BufferHeight,
            format = (int)impl2.PixelFormat,
            reallocate = 0, // native side sets these flags during the update
            updated = 0,
            data = impl2.UnmanagedData
        };
        Marshal.StructureToPtr(structure, ptr, false);
        num++;
    }
}
// Unmarshals the per-frame image headers written by native code back into
// the managed Image instances, reallocating pixel buffers when native code
// requested it, or copying fresh pixel data when the image was updated.
private void UpdateCameraFrame()
{
    int num = 0;
    int headerSize = Marshal.SizeOf(typeof(ImageHeaderData));
    CameraDeviceImpl instance = (CameraDeviceImpl)CameraDevice.Instance;
    foreach (ImageImpl impl2 in instance.GetAllImages().Values)
    {
        // 64-bit-safe pointer arithmetic: ToInt32() throws OverflowException
        // in a 64-bit process when the allocation lives above 2 GB.
        IntPtr ptr = new IntPtr(this.mImageHeaderData.ToInt64() + ((long)num * headerSize));
        ImageHeaderData data = (ImageHeaderData)Marshal.PtrToStructure(ptr, typeof(ImageHeaderData));
        // Copy the header fields back into the managed image.
        impl2.Width = data.width;
        impl2.Height = data.height;
        impl2.Stride = data.stride;
        impl2.BufferWidth = data.bufferWidth;
        impl2.BufferHeight = data.bufferHeight;
        impl2.PixelFormat = (Image.PIXEL_FORMAT)data.format;
        if (data.reallocate == 1)
        {
            // Native code asked for a (re)allocation: size managed and
            // unmanaged buffers identically. No pixel copy this frame —
            // the unmanaged buffer has not been filled yet.
            int bufferSize = QCARWrapper.Instance.QcarGetBufferSize(impl2.BufferWidth, impl2.BufferHeight, (int)impl2.PixelFormat);
            impl2.Pixels = new byte[bufferSize];
            Marshal.FreeHGlobal(impl2.UnmanagedData);
            impl2.UnmanagedData = Marshal.AllocHGlobal(bufferSize);
        }
        else if (data.updated == 1)
        {
            // Buffer unchanged in size but holds new pixels: copy them over.
            impl2.CopyPixelsFromUnmanagedBuffer();
        }
        num++;
    }
}
// Grabs the current webcam frame, pins the managed pixel buffer and hands it
// to the native QCAR layer as an injected camera frame (Play Mode path).
private void InjectCameraFrame()
{
    CameraDeviceImpl instance = (CameraDeviceImpl)CameraDevice.Instance;
    // Pin the managed pixel array so native code can read it in place.
    GCHandle handle = GCHandle.Alloc(instance.WebCam.GetPixels32AndBufferFrame(this.mInjectedFrameIdx), GCHandleType.Pinned);
    try
    {
        IntPtr pixels = handle.AddrOfPinnedObject();
        int actualWidth = instance.WebCam.ActualWidth;
        int actualHeight = instance.WebCam.ActualHeight;
        // 0x10 is the native pixel-format code for 32-bit RGBA (the sibling
        // implementation in this file passes Image.PIXEL_FORMAT.RGBA8888 here);
        // stride is 4 bytes per pixel.
        QCARWrapper.Instance.QcarAddCameraFrame(pixels, actualWidth, actualHeight, 0x10, 4 * actualWidth, this.mInjectedFrameIdx, instance.WebCam.FlipHorizontally ? 1 : 0);
        this.mInjectedFrameIdx++;
    }
    finally
    {
        // Always unpin the buffer, even if the native call throws —
        // a leaked pinned handle keeps the array stuck and fragments the heap.
        handle.Free();
    }
}
// Reports whether the GL surface has changed since the last frame.
public static bool IsRendererDirty()
{
    CameraDeviceImpl device = (CameraDeviceImpl)CameraDevice.Instance;

    // In Play Mode there is no native renderer — only the camera
    // device's own dirty flag is relevant.
    if (QCARRuntimeUtilities.IsPlayMode())
    {
        return device.IsDirty();
    }

    // On device, the native renderer may also have been invalidated.
    bool nativeDirty = QCARWrapper.Instance.IsRendererDirty() == 1;
    return nativeDirty || device.IsDirty();
}
// Returns true when the GL surface or the camera device has been invalidated.
public static bool IsRendererDirty()
{
    CameraDeviceImpl device = (CameraDeviceImpl)CameraDevice.Instance;

    // Play Mode has no native renderer; only the device flag matters.
    if (QCARRuntimeUtilities.IsPlayMode())
    {
        return device.IsDirty();
    }

    // Native renderer is checked first, then the managed device flag.
    return QCARWrapper.Instance.IsRendererDirty() == 1 || device.IsDirty();
}
// Returns the texture info associated with the current video background.
public override QCARRenderer.VideoTextureInfo GetVideoTextureInfo()
{
    // Play Mode: the webcam emulation layer owns the texture info.
    if (QCARRuntimeUtilities.IsPlayMode())
    {
        CameraDeviceImpl device = (CameraDeviceImpl)CameraDevice.Instance;
        return device.WebCam.GetVideoTextureInfo();
    }

    // Device: let native code fill a temporary unmanaged buffer, then
    // marshal the struct out of it.
    int structSize = Marshal.SizeOf(typeof(QCARRenderer.VideoTextureInfo));
    IntPtr buffer = Marshal.AllocHGlobal(structSize);
    QCARWrapper.Instance.RendererGetVideoBackgroundTextureInfo(buffer);
    QCARRenderer.VideoTextureInfo result = (QCARRenderer.VideoTextureInfo)Marshal.PtrToStructure(buffer, typeof(QCARRenderer.VideoTextureInfo));
    Marshal.FreeHGlobal(buffer);
    return result;
}
// Core per-frame QCAR update: marshals camera images to and from native
// code, runs native tracking and refreshes managed trackable state.
// counterRotation - current surface orientation used to counter-rotate results.
// deviceMode - active camera device mode (used to look up video mode data).
// videoMode - receives the video mode data for the current frame.
// Returns false when paused or when the native update failed, true otherwise.
internal bool Update(ScreenOrientation counterRotation, CameraDevice.CameraDeviceMode deviceMode, ref CameraDevice.VideoModeData videoMode)
{
    if (this.mPaused)
    {
        // While paused, ask native code to present the last frame again and
        // skip all state updates.
        QCARWrapper.Instance.PausedUpdateQCAR();
        return(false);
    }
    if (!QCARRuntimeUtilities.IsQCAREnabled())
    {
        // Free editor version without native plugin support: emulate
        // tracking entirely in managed code.
        this.UpdateTrackablesEditor();
        return(true);
    }
    // Keep the unmanaged image-header array in sync with registered images.
    this.UpdateImageContainer();
    if (QCARRuntimeUtilities.IsPlayMode())
    {
        // Play Mode: feed the latest webcam frame into native QCAR.
        CameraDeviceImpl instance = (CameraDeviceImpl)CameraDevice.Instance;
        if (instance.WebCam.DidUpdateThisFrame)
        {
            this.InjectCameraFrame();
        }
    }
    // Run the native update; a return value of 0 signals failure.
    if (QCARWrapper.Instance.UpdateQCAR(this.mImageHeaderData, this.mNumImageHeaders, this.mLastProcessedFrameStatePtr, (int)counterRotation, (int)deviceMode) == 0)
    {
        return(false);
    }
    // Unmarshal the processed frame state written by native code.
    this.mFrameState = (FrameState)Marshal.PtrToStructure(this.mLastProcessedFrameStatePtr, typeof(FrameState));
    if (QCARRuntimeUtilities.IsPlayMode())
    {
        videoMode = CameraDevice.Instance.GetVideoMode(deviceMode);
    }
    else
    {
        // On device the video mode data is embedded in the frame state.
        IntPtr videoModeData = this.mFrameState.videoModeData;
        videoMode = (CameraDevice.VideoModeData)Marshal.PtrToStructure(videoModeData, typeof(CameraDevice.VideoModeData));
    }
    // Resize the trackable result container if the result count changed.
    this.InitializeTrackableContainer(this.mFrameState.numTrackableResults);
    // Copy this frame's camera images back into managed memory.
    this.UpdateCameraFrame();
    // Update trackable state from this frame's tracking results.
    this.UpdateTrackers(this.mFrameState);
    if (QCARRuntimeUtilities.IsPlayMode())
    {
        // Remember which injected frame was processed last.
        CameraDeviceImpl impl2 = (CameraDeviceImpl)CameraDevice.Instance;
        impl2.WebCam.SetFrameIndex(this.mFrameState.frameIndex);
    }
    if (!this.mDrawVideobackground)
    {
        // When native background rendering is disabled, draw into the
        // offscreen texture now; deferring it to the render callback would
        // lag the background one frame behind the augmentations.
        this.RenderVideoBackgroundOrDrawIntoTextureInNative();
    }
    return(true);
}
// Gets a snapshot from the emulated webcam and injects it into the native
// QCAR layer as a camera frame (Play Mode emulation path).
private void InjectCameraFrame()
{
    CameraDeviceImpl cameraDeviceImpl = (CameraDeviceImpl)CameraDevice.Instance;
    Color32[] pixels = cameraDeviceImpl.WebCam.GetPixels32AndBufferFrame(mInjectedFrameIdx);
    // Pin the managed pixel array so native code can read it in place.
    GCHandle pixelHandle = GCHandle.Alloc(pixels, GCHandleType.Pinned);
    try
    {
        IntPtr pixelPointer = pixelHandle.AddrOfPinnedObject();
        int width = cameraDeviceImpl.WebCam.ActualWidth;
        int height = cameraDeviceImpl.WebCam.ActualHeight;

        // add a camera frame - it always has to be rotated and flipped by default
        // (stride is 4 bytes per RGBA8888 pixel)
        QCARWrapper.Instance.QcarAddCameraFrame(pixelPointer,
                                                width,
                                                height,
                                                (int)Image.PIXEL_FORMAT.RGBA8888,
                                                4 * width,
                                                mInjectedFrameIdx,
                                                cameraDeviceImpl.WebCam.FlipHorizontally ? 1 : 0);

        mInjectedFrameIdx++;
    }
    finally
    {
        // Always unpin the buffer, even if the native call throws —
        // a leaked pinned handle keeps the array stuck in place forever.
        pixelHandle.Free();
    }
}
// Unmarshal the camera images for this frame
private void UpdateCameraFrame()
{
    // Unmarshal the image data:
    int i = 0;
    int headerSize = Marshal.SizeOf(typeof(ImageHeaderData));
    CameraDeviceImpl cameraDeviceImpl = (CameraDeviceImpl)CameraDevice.Instance;
    foreach (ImageImpl image in cameraDeviceImpl.GetAllImages().Values)
    {
        // 64-bit-safe pointer arithmetic; ToInt32() throws OverflowException
        // in a 64-bit process when the allocation lives above 2 GB.
        IntPtr imagePtr = new IntPtr(mImageHeaderData.ToInt64() + (long)i * headerSize);
        ImageHeaderData imageHeader = (ImageHeaderData)
            Marshal.PtrToStructure(imagePtr, typeof(ImageHeaderData));

        // Copy info back to managed Image instance:
        image.Width = imageHeader.width;
        image.Height = imageHeader.height;
        image.Stride = imageHeader.stride;
        image.BufferWidth = imageHeader.bufferWidth;
        image.BufferHeight = imageHeader.bufferHeight;
        image.PixelFormat = (Image.PIXEL_FORMAT)imageHeader.format;

        // Reallocate if required:
        if (imageHeader.reallocate == 1)
        {
            // Size the managed and unmanaged buffers identically; compute the
            // size once instead of asking the native wrapper twice.
            int bufferSize = QCARWrapper.Instance.QcarGetBufferSize(image.BufferWidth,
                                    image.BufferHeight,
                                    (int)image.PixelFormat);
            image.Pixels = new byte[bufferSize];
            Marshal.FreeHGlobal(image.UnmanagedData);
            image.UnmanagedData = Marshal.AllocHGlobal(bufferSize);

            // Note we don't copy the data this frame as the unmanaged
            // buffer was not filled.
        }
        else if (imageHeader.updated == 1)
        {
            // Copy data:
            image.CopyPixelsFromUnmanagedBuffer();
        }

        ++i;
    }
}
// Process the camera image and tracking data for this frame.
// counterRotation - current surface orientation, passed to native code so
// results can be counter-rotated. Returns nothing; updates trackables and
// camera images as a side effect.
public void Update(ScreenOrientation counterRotation)
{
    // enable "fake tracking" if running in the free editor version
    // that does not support native plugins
    if (!QCARRuntimeUtilities.IsQCAREnabled())
    {
        UpdateTrackablesEditor();
        return;
    }

    // Prepare the camera image container
    UpdateImageContainer();

    if (QCARRuntimeUtilities.IsPlayMode())
    {
        // Play Mode: feed the latest webcam frame into native QCAR.
        CameraDeviceImpl cameraDeviceImpl = (CameraDeviceImpl)CameraDevice.Instance;
        if (cameraDeviceImpl.WebCam.DidUpdateThisFrame)
        {
            InjectCameraFrame();
        }
    }

    // Draw the video background or update the video texture
    // Also retrieve registered camera images for this frame
    QCARWrapper.Instance.UpdateQCAR(mImageHeaderData, mNumImageHeaders,
                                    mDrawVideobackground ? 0 : 1,
                                    mLastProcessedFrameStatePtr,
                                    (int)counterRotation);

    // Unmarshal the frame state written by native code.
    FrameState frameState = (FrameState)Marshal.PtrToStructure(mLastProcessedFrameStatePtr, typeof(FrameState));

    // Reinitialize the trackable data container if required:
    InitializeTrackableContainer(frameState.numTrackableResults);

    // Handle the camera image data
    UpdateCameraFrame();

    // Handle the trackable data
    UpdateTrackers(frameState);

    if (QCARRuntimeUtilities.IsPlayMode())
    {
        // read out the index of the last processed frame
        CameraDeviceImpl cameraDeviceImpl = (CameraDeviceImpl)CameraDevice.Instance;
        cameraDeviceImpl.WebCam.SetFrameIndex(frameState.frameIndex);
    }
}
// Updates the scene with new tracking data and calls the registered
// ITrackerEventHandlers once all trackables have been refreshed.
void Update()
{
    if (QCARManager.Instance.Initialized)
    {
        // Get the current orientation of the surface:
        ScreenOrientation surfaceOrientation = (ScreenOrientation)QCARWrapper.Instance.GetSurfaceOrientation();

        // Check if we need to update the video background configuration and projection matrix:
        CameraDeviceImpl cameraDeviceImpl = (CameraDeviceImpl)CameraDevice.Instance;
        if (cameraDeviceImpl.CameraReady &&
            (QCARUnity.IsRendererDirty() || mProjectionOrientation != surfaceOrientation))
        {
            ConfigureVideoBackground(false);
            UpdateProjection(surfaceOrientation);
            cameraDeviceImpl.ResetDirtyFlag();
        }

        // Bind a simple material to clear the OpenGL state
        mClearMaterial.SetPass(0);

        // QCARManager renders the camera image and updates the trackables
        ((QCARManagerImpl)QCARManager.Instance).Update(mProjectionOrientation);

        // Tell Unity that we may have changed the OpenGL state behind the scenes
        GL.InvalidateState();

        // Update the camera clear flags
        UpdateCameraClearFlags();

        // Let the trackable event handlers know that all trackables have been updated
        foreach (ITrackerEventHandler handler in mTrackerEventHandlers)
        {
            handler.OnTrackablesUpdated();
        }
    }
    else if (QCARRuntimeUtilities.IsPlayMode())
    {
        // in some rare occasions, Unity re-compiles the scripts shortly after starting play mode
        // this invalidates the internal state, so we have to restart play mode in order to ensure correct execution.
        Debug.LogWarning("Scripts have been recompiled during Play mode, need to restart!");
        // re-establish wrapper:
        QCARWrapper.Create();
        // stop and restart play mode
        QCARRuntimeUtilities.RestartPlayMode();
    }
}
// Update the image container for the currently registered formats
private void UpdateImageContainer()
{
    CameraDeviceImpl cameraDeviceImpl = (CameraDeviceImpl)CameraDevice.Instance;

    // Reallocate the data container if the number of requested images has
    // changed, or if the container is not allocated
    if (mNumImageHeaders != cameraDeviceImpl.GetAllImages().Count ||
        (cameraDeviceImpl.GetAllImages().Count > 0 && mImageHeaderData == IntPtr.Zero))
    {
        mNumImageHeaders = cameraDeviceImpl.GetAllImages().Count;

        // FreeHGlobal is a documented no-op for IntPtr.Zero, safe on first use.
        Marshal.FreeHGlobal(mImageHeaderData);

        mImageHeaderData = Marshal.AllocHGlobal(Marshal.SizeOf(
                            typeof(ImageHeaderData)) * mNumImageHeaders);
    }

    // Update the image info:
    int i = 0;
    int headerSize = Marshal.SizeOf(typeof(ImageHeaderData));
    foreach (ImageImpl image in cameraDeviceImpl.GetAllImages().Values)
    {
        // 64-bit-safe pointer arithmetic; ToInt32() throws OverflowException
        // in a 64-bit process when the allocation lives above 2 GB.
        IntPtr imagePtr = new IntPtr(mImageHeaderData.ToInt64() + (long)i * headerSize);
        ImageHeaderData imageHeader = new ImageHeaderData();
        imageHeader.width = image.Width;
        imageHeader.height = image.Height;
        imageHeader.stride = image.Stride;
        imageHeader.bufferWidth = image.BufferWidth;
        imageHeader.bufferHeight = image.BufferHeight;
        imageHeader.format = (int)image.PixelFormat;
        imageHeader.reallocate = 0; // flags are set by native code during the update
        imageHeader.updated = 0;
        imageHeader.data = image.UnmanagedData;
        Marshal.StructureToPtr(imageHeader, imagePtr, false);
        ++i;
    }
}
// Returns the texture info associated with the current video background
public override VideoTextureInfo GetVideoTextureInfo()
{
    // Play Mode: the webcam emulation layer owns the texture info.
    if (QCARRuntimeUtilities.IsPlayMode())
    {
        CameraDeviceImpl device = (CameraDeviceImpl)CameraDevice.Instance;
        return device.WebCam.GetVideoTextureInfo();
    }

    // Device: let native code fill a temporary unmanaged buffer, then
    // marshal the struct out of it and release the buffer.
    int structSize = Marshal.SizeOf(typeof(VideoTextureInfo));
    IntPtr buffer = Marshal.AllocHGlobal(structSize);
    QCARWrapper.Instance.RendererGetVideoBackgroundTextureInfo(buffer);
    VideoTextureInfo result = (VideoTextureInfo)Marshal.PtrToStructure(buffer, typeof(VideoTextureInfo));
    Marshal.FreeHGlobal(buffer);
    return result;
}
// Process the camera image and tracking data for this frame.
// counterRotation - current surface orientation used to counter-rotate results.
// deviceMode - active camera device mode (used to look up video mode data).
// videoMode - receives the video mode data for the current frame.
public void Update(ScreenOrientation counterRotation, CameraDevice.CameraDeviceMode deviceMode, ref CameraDevice.VideoModeData videoMode)
{
    if (mPaused)
    {
        // set the last frame again, do not update the state
        QCARWrapper.Instance.PausedUpdateQCAR();
    }
    else
    {
        // enable "fake tracking" if running in the free editor version
        // that does not support native plugins
        if (!QCARRuntimeUtilities.IsQCAREnabled())
        {
            UpdateTrackablesEditor();
            return;
        }

        // Prepare the camera image container
        UpdateImageContainer();

        if (QCARRuntimeUtilities.IsPlayMode())
        {
            // Play Mode: feed the latest webcam frame into native QCAR.
            CameraDeviceImpl cameraDeviceImpl = (CameraDeviceImpl)CameraDevice.Instance;
            if (cameraDeviceImpl.WebCam.DidUpdateThisFrame)
            {
                InjectCameraFrame();
            }
        }

        // Draw the video background or update the video texture
        // Also retrieve registered camera images for this frame
        QCARWrapper.Instance.UpdateQCAR(mImageHeaderData, mNumImageHeaders,
                                        mLastProcessedFrameStatePtr,
                                        (int)counterRotation,
                                        (int)deviceMode);

        // Unmarshal the frame state written by native code.
        mFrameState = (FrameState)Marshal.PtrToStructure(mLastProcessedFrameStatePtr, typeof(FrameState));
    }

    // Get video mode data
    if (QCARRuntimeUtilities.IsPlayMode())
    {
        videoMode = CameraDevice.Instance.GetVideoMode(deviceMode);
    }
    else
    {
        // On device the video mode data is embedded in the frame state.
        var videoModePtr = mFrameState.videoModeData;
        videoMode = (CameraDevice.VideoModeData)
                    Marshal.PtrToStructure(videoModePtr, typeof(CameraDevice.VideoModeData));
    }

    // Reinitialize the trackable data container if required:
    InitializeTrackableContainer(mFrameState.numTrackableResults);

    // Handle the camera image data
    UpdateCameraFrame();

    // Handle the trackable data
    UpdateTrackers(mFrameState);

    if (QCARRuntimeUtilities.IsPlayMode())
    {
        // read out the index of the last processed frame
        CameraDeviceImpl cameraDeviceImpl = (CameraDeviceImpl)CameraDevice.Instance;
        cameraDeviceImpl.WebCam.SetFrameIndex(mFrameState.frameIndex);
    }

    // if native video background rendering is disabled, we need to call the
    // method to draw into the offscreen texture here in the update loop.
    // rendering the video background into the texture in the PrepareRendering
    // callback will result in the texture updated in the next frame, which
    // results in a lag between augmentations and the video background
    if (!mDrawVideobackground)
    {
        RenderVideoBackgroundOrDrawIntoTextureInNative();
    }
}