private static bool TryGetLastFrameFromExtensions(out XRCameraFrame frame)
{
    ARCoreExtensions extensions = ARCoreExtensions._instance;
    ARCameraManager cameraManager = extensions.CameraManager;
    Camera camera = cameraManager.GetComponent<Camera>();
    var cameraParams = new XRCameraParams
    {
        zNear = camera.nearClipPlane,
        zFar = camera.farClipPlane,
        screenWidth = Screen.width,
        screenHeight = Screen.height,
        screenOrientation = Screen.orientation
    };

    if (!cameraManager.subsystem.TryGetLatestFrame(cameraParams, out frame))
    {
        Debug.LogWarning("The current XRCameraFrame is not available; try again later.");
        return false;
    }

    if (frame.timestampNs == 0 || frame.FrameHandle() == IntPtr.Zero)
    {
        Debug.LogWarning("The current XRCameraFrame is not ready; try again later.");
        return false;
    }

    return true;
}
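A minimal caller sketch for the helper above. The method below is illustrative only and assumes it sits in the same class, since TryGetLastFrameFromExtensions is private:

// Hypothetical caller; lives in the same class as the private helper.
private static void LogFrameTimestampIfReady()
{
    if (!TryGetLastFrameFromExtensions(out XRCameraFrame frame))
    {
        return; // The helper has already logged a warning.
    }

    Debug.LogFormat("Got ARCore frame with timestamp {0} ns.", frame.timestampNs);
}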
public static void ImplicitTest()
{
    CameraFrame f1 = new CameraFrame()
    {
        timestampNs = 1,
        averageBrightness = 2f,
        averageColorTemperature = 3f,
        colorCorrection = Color.red,
        projectionMatrix = TestUtil.MockMatrix(4),
        displayMatrix = TestUtil.MockMatrix(20),
        trackingState = TrackingState.Tracking,
        nativePtr = new IntPtr(36),
        properties = XRCameraFrameProperties.DisplayMatrix,
        averageIntensityInLumens = 37f,
        exposureDuration = 38f,
        exposureOffset = 39f
    };

    // Round-trip through the implicit conversions in both directions.
    XRCameraFrame xr = f1;
    Debug.Log(xr);
    CameraFrame f2 = xr;
    Assert.AreEqual(f1, f2);
}
public unsafe void Convert(out XRCameraFrame target)
{
    // Bitwise copy; valid only while this mock's layout matches XRCameraFrame.
    fixed (XRCameraFrame* targetPtr = &target)
    fixed (XRCameraFrameMock* selfPtr = &this)
    {
        UnsafeUtility.MemCpy(targetPtr, selfPtr, sizeof(XRCameraFrame));
    }
}
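The MemCpy above is only sound if XRCameraFrameMock mirrors XRCameraFrame field for field. A trimmed sketch of what such a mirror struct might declare; the field set and ordering are assumptions tied to the ARSubsystems version in use (the m_* names match those used in the provider snippet further down):

[StructLayout(LayoutKind.Sequential)]
internal struct XRCameraFrameMock
{
    public long m_TimestampNs;
    public float m_AverageBrightness;
    public float m_AverageColorTemperature;
    public Color m_ColorCorrection;
    public Matrix4x4 m_ProjectionMatrix;
    public Matrix4x4 m_DisplayMatrix;
    public TrackingState m_TrackingState;
    public IntPtr m_NativePtr;
    public XRCameraFrameProperties m_Properties;
}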
public static IntPtr FrameHandle(this XRCameraFrame frame)
{
    FrameNativePointerStruct info = (FrameNativePointerStruct)Marshal.PtrToStructure(
        frame.nativePtr, typeof(FrameNativePointerStruct));
    return info.FrameHandle;
}
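FrameNativePointerStruct itself is not shown in this listing; only its FrameHandle field is evidenced above. A deliberately minimal assumed shape (any other fields in the real struct are unknown and omitted):

[StructLayout(LayoutKind.Sequential)]
internal struct FrameNativePointerStruct
{
    public IntPtr FrameHandle; // Native frame handle.
}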
/// <summary>
/// Invokes the frame-received event, packing the frame information into the event arguments.
/// </summary>
/// <param name="frame">The camera frame raising the event.</param>
void InvokeFrameReceivedEvent(XRCameraFrame frame)
{
    var lightEstimation = new ARLightEstimationData();

    if (frame.hasAverageBrightness)
    {
        lightEstimation.averageBrightness = frame.averageBrightness;
    }

    if (frame.hasAverageIntensityInLumens)
    {
        lightEstimation.averageIntensityInLumens = frame.averageIntensityInLumens;
    }

    if (frame.hasAverageColorTemperature)
    {
        lightEstimation.averageColorTemperature = frame.averageColorTemperature;
    }

    if (frame.hasColorCorrection)
    {
        lightEstimation.colorCorrection = frame.colorCorrection;
    }

    var eventArgs = new ARCameraFrameEventArgs();
    eventArgs.lightEstimation = lightEstimation;

    if (frame.hasTimestamp)
    {
        eventArgs.timestampNs = frame.timestampNs;
    }

    if (frame.hasProjectionMatrix)
    {
        eventArgs.projectionMatrix = frame.projectionMatrix;
    }

    if (frame.hasDisplayMatrix)
    {
        eventArgs.displayMatrix = frame.displayMatrix;
    }

    s_Textures.Clear();
    s_PropertyIds.Clear();
    foreach (var textureInfo in m_TextureInfos)
    {
        s_Textures.Add(textureInfo.texture);
        s_PropertyIds.Add(textureInfo.descriptor.propertyNameId);
    }

    eventArgs.textures = s_Textures;
    eventArgs.propertyNameIds = s_PropertyIds;

    frameReceived(eventArgs);
}
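On the consuming side, this event surfaces as ARCameraManager.frameReceived. A minimal subscriber sketch using the public ARFoundation API (the logging body is illustrative):

public class LightEstimationLogger : MonoBehaviour
{
    [SerializeField] ARCameraManager cameraManager; // Assumed Inspector reference.

    void OnEnable() { cameraManager.frameReceived += OnFrameReceived; }
    void OnDisable() { cameraManager.frameReceived -= OnFrameReceived; }

    void OnFrameReceived(ARCameraFrameEventArgs args)
    {
        if (args.lightEstimation.averageBrightness.HasValue)
        {
            Debug.Log($"Average brightness: {args.lightEstimation.averageBrightness.Value}");
        }
    }
}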
/// <summary>
/// (iOS only) Run pose estimation on an XRCameraFrame (async).
/// </summary>
/// <param name="frame">The camera frame to run prediction on.</param>
public static void ProcessPoseFromFrameAsync(XRCameraFrame frame)
{
    IntPtr buffer = frame.nativePtr;
    if (buffer == IntPtr.Zero)
    {
        Debug.LogError("buffer is NULL!");
        return;
    }

    FritziOSPoseManager.ProcessPoseAsync(buffer);
}
/// <summary>
/// (iOS only) Run pose estimation on an XRCameraFrame (synchronous).
/// </summary>
/// <param name="frame">The camera frame to run prediction on.</param>
public static List<FritzPose> ProcessPoseFromFrame(XRCameraFrame frame)
{
    IntPtr buffer = frame.nativePtr;
    if (buffer == IntPtr.Zero)
    {
        Debug.LogError("buffer is NULL!");
        return null;
    }

    string message = FritziOSPoseManager.ProcessPose(buffer);
    return ProcessEncodedPoses(message);
}
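A hedged usage sketch for the synchronous path: the wrapper class name FritzPoseManager and the MonoBehaviour wiring below are assumptions, not shown in this listing.

public class PoseRunner : MonoBehaviour
{
    [SerializeField] ARCameraManager cameraManager; // Assumed Inspector reference.

    void Update()
    {
        var cameraParams = new XRCameraParams
        {
            screenWidth = Screen.width,
            screenHeight = Screen.height,
            screenOrientation = Screen.orientation
        };

        if (cameraManager.subsystem != null &&
            cameraManager.subsystem.TryGetLatestFrame(cameraParams, out XRCameraFrame frame))
        {
            List<FritzPose> poses = FritzPoseManager.ProcessPoseFromFrame(frame);
            if (poses != null)
            {
                Debug.Log($"Detected {poses.Count} pose(s).");
            }
        }
    }
}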
public static IntPtr FrameHandle(this XRCameraFrame frame)
{
#if UNITY_EDITOR
    if (UnityEngine.Application.isEditor)
    {
        return ARCoreCloudAnchorsEditorDelegate.dummyFramePtr;
    }
#endif
    FrameNativePointerStruct info = (FrameNativePointerStruct)Marshal.PtrToStructure(
        frame.nativePtr, typeof(FrameNativePointerStruct));
    return info.FrameHandle;
}
/// <summary>
/// Get the 4x4 image display matrix for the camera frame. This is used by textures
/// populated from CPU images to calculate the display coordinates.
/// </summary>
/// <param name="frame">The XRCameraFrame instance.</param>
/// <returns>The 4x4 image display matrix.</returns>
public static Matrix4x4 GetImageDisplayMatrix(this XRCameraFrame frame)
{
    // Unity Screen Coordinate:      Android Screen Coordinate (flipped Y-Axis):
    // (0, 1)          (1, 1)        (0, 0)          (1, 0)
    // |----------------|            |----------------|
    // |                |            |                |
    // |                |            |                |
    // |                |            |                |
    // |----------------|            |----------------|
    // (0, 0)          (1, 0)        (0, 1)          (1, 1)
    IntPtr sessionHandle = ARCoreExtensions._instance.currentARCoreSessionHandle;

    // X-Axis (1, 0) in Unity view maps to (1, 1) on Android screen.
    Vector2 affineBasisX = new Vector2(1.0f, 1.0f);

    // Y-Axis (0, 1) in Unity view maps to (0, 0) on Android screen.
    Vector2 affineBasisY = new Vector2(0.0f, 0.0f);

    // Origin (0, 0) in Unity view maps to (0, 1) on Android screen.
    Vector2 affineOrigin = new Vector2(0.0f, 1.0f);

    Vector2 transformedX = FrameApi.TransformCoordinates2d(
        sessionHandle, frame.FrameHandle(), ApiCoordinates2dType.ViewNormalized,
        ApiCoordinates2dType.ImageNormalized, ref affineBasisX);
    Vector2 transformedY = FrameApi.TransformCoordinates2d(
        sessionHandle, frame.FrameHandle(), ApiCoordinates2dType.ViewNormalized,
        ApiCoordinates2dType.ImageNormalized, ref affineBasisY);
    Vector2 transformedOrigin = FrameApi.TransformCoordinates2d(
        sessionHandle, frame.FrameHandle(), ApiCoordinates2dType.ViewNormalized,
        ApiCoordinates2dType.ImageNormalized, ref affineOrigin);

    Matrix4x4 imageMatrix = Matrix4x4.identity;
    imageMatrix[0, 0] = transformedX.x - transformedOrigin.x;
    imageMatrix[0, 1] = transformedX.y - transformedOrigin.y;
    imageMatrix[1, 0] = transformedY.x - transformedOrigin.x;
    imageMatrix[1, 1] = transformedY.y - transformedOrigin.y;
    imageMatrix[2, 0] = transformedOrigin.x;
    imageMatrix[2, 1] = transformedOrigin.y;
    return imageMatrix;
}
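A hedged usage sketch: pushing the resulting matrix to a material that samples a CPU-image-backed texture. The property name _ImageDisplayMatrix and the material reference are illustrative:

// Rows 0 and 1 of the matrix hold the transformed basis vectors and row 2 the
// origin, so a view-space UV maps to image space as:
//   uv'.x = uv.x * m00 + uv.y * m10 + m20
//   uv'.y = uv.x * m01 + uv.y * m11 + m21
Matrix4x4 imageDisplayMatrix = frame.GetImageDisplayMatrix();
material.SetMatrix("_ImageDisplayMatrix", imageDisplayMatrix);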
public override bool TryGetFrame(XRCameraParams cameraParams, out XRCameraFrame cameraFrame)
{
    var remote = ARKitReceiver.Instance;
    if (remote == null)
    {
        cameraFrame = default(XRCameraFrame);
        return false;
    }

    var remoteFrame = remote.CameraFrame;
    if (remoteFrame.timestampNs == default(long))
    {
        cameraFrame = default(XRCameraFrame);
        return false;
    }

    const XRCameraFrameProperties properties =
        XRCameraFrameProperties.Timestamp
        | XRCameraFrameProperties.ProjectionMatrix
        | XRCameraFrameProperties.DisplayMatrix;

    cameraFrame = new CameraFrame()
    {
        timestampNs = remoteFrame.timestampNs,
        averageBrightness = 0,
        averageColorTemperature = 0,
        colorCorrection = default(Color),
        projectionMatrix = remoteFrame.projectionMatrix,
        displayMatrix = remoteFrame.displayMatrix,
        trackingState = TrackingState.Tracking,
        nativePtr = new IntPtr(0),
        properties = properties,
        averageIntensityInLumens = 0,
        exposureDuration = 0,
        exposureOffset = 0
    };
    // Debug.Log(cameraFrame);
    return true;
}
public override bool TryGetFrame(XRCameraParams cameraParams, out XRCameraFrame cameraFrame)
{
    if (!Application.isPlaying || !mockCamera.isPrepared)
    {
        cameraFrame = default(XRCameraFrame);
        return false;
    }

    const XRCameraFrameProperties properties =
        XRCameraFrameProperties.Timestamp
        // | XRCameraFrameProperties.ProjectionMatrix
        | XRCameraFrameProperties.DisplayMatrix;

    Matrix4x4 displayMatrix = GetDisplayTransform(
        (float)mockCamera.texture.width / mockCamera.texture.height,
        (float)Screen.width / Screen.height);

    cameraFrame = (XRCameraFrame)new CameraFrame()
    {
        timestampNs = DateTime.Now.Ticks,
        averageBrightness = 0,
        averageColorTemperature = 0,
        colorCorrection = default(Color),
        projectionMatrix = Matrix4x4.identity,
        displayMatrix = displayMatrix,
        trackingState = TrackingState.Tracking,
        nativePtr = new IntPtr(0),
        properties = properties,
        averageIntensityInLumens = 0,
        exposureDuration = 0,
        exposureOffset = 0,
        mainLightIntensityLumens = 0,
        mainLightColor = default(Color),
        ambientSphericalHarmonics = default(SphericalHarmonicsL2),
        cameraGrain = default(XRTextureDescriptor),
        noiseIntensity = 0,
    };
    // Debug.Log(cameraFrame);
    return true;
}
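The GetDisplayTransform helper called above is not shown in this listing. One plausible implementation, assuming an aspect-fill crop encoded as a UV scale and offset (this body is an assumption, not the original):

static Matrix4x4 GetDisplayTransform(float textureAspect, float screenAspect)
{
    // Scale UVs so the texture fills the screen, cropping the overflowing axis.
    Vector2 scale = Vector2.one;
    Vector2 offset = Vector2.zero;
    if (textureAspect > screenAspect)
    {
        scale.x = screenAspect / textureAspect; // Crop left/right.
        offset.x = (1f - scale.x) * 0.5f;
    }
    else
    {
        scale.y = textureAspect / screenAspect; // Crop top/bottom.
        offset.y = (1f - scale.y) * 0.5f;
    }

    Matrix4x4 m = Matrix4x4.identity;
    m.m00 = scale.x;
    m.m11 = scale.y;
    m.m03 = offset.x;
    m.m13 = offset.y;
    return m;
}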
/// <summary>
/// Pull the texture descriptors from the camera subsystem, and update the texture
/// information maintained by this component.
/// </summary>
/// <param name="frame">The latest updated camera frame.</param>
void UpdateTexturesInfos(XRCameraFrame frame)
{
    var textureDescriptors = subsystem.GetTextureDescriptors(Allocator.Temp);
    try
    {
        int numUpdated = Math.Min(m_TextureInfos.Count, textureDescriptors.Length);

        // Update the existing textures that are in common between the two arrays.
        for (int i = 0; i < numUpdated; ++i)
        {
            m_TextureInfos[i] = ARTextureInfo.GetUpdatedTextureInfo(m_TextureInfos[i], textureDescriptors[i]);
        }

        // If there are fewer textures in the current frame than we had previously,
        // destroy any remaining unneeded textures.
        if (numUpdated < m_TextureInfos.Count)
        {
            for (int i = numUpdated; i < m_TextureInfos.Count; ++i)
            {
                m_TextureInfos[i].Reset();
            }

            m_TextureInfos.RemoveRange(numUpdated, m_TextureInfos.Count - numUpdated);
        }
        // Else, if there are more textures in the current frame than we had previously,
        // add new textures for any additional descriptors.
        else if (textureDescriptors.Length > m_TextureInfos.Count)
        {
            for (int i = numUpdated; i < textureDescriptors.Length; ++i)
            {
                m_TextureInfos.Add(new ARTextureInfo(textureDescriptors[i]));
            }
        }
    }
    finally
    {
        if (textureDescriptors.IsCreated)
        {
            textureDescriptors.Dispose();
        }
    }
}
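Downstream, the texture list maintained here is handed to subscribers through ARCameraFrameEventArgs; a consuming sketch that binds each camera texture to a material by its shader property ID (the material reference is illustrative):

void OnCameraFrameReceived(ARCameraFrameEventArgs args)
{
    // textures and propertyNameIds are parallel lists.
    for (int i = 0; i < args.textures.Count; ++i)
    {
        material.SetTexture(args.propertyNameIds[i], args.textures[i]);
    }
}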
// Editor/unsupported-platform stub: never produces a frame.
public static bool UnityARKit_Camera_TryGetFrame(XRCameraParams cameraParams, out XRCameraFrame cameraFrame)
{
    cameraFrame = default(XRCameraFrame);
    return false;
}
public static extern bool UnityARKit_Camera_TryGetFrame(XRCameraParams cameraParams, out XRCameraFrame cameraFrame);
/// <summary>
/// Get the current camera frame for the subsystem.
/// </summary>
/// <param name="cameraParams">The current Unity <c>Camera</c> parameters.</param>
/// <param name="cameraFrame">The current camera frame returned by the method.</param>
/// <returns><c>true</c> if the method successfully got a frame. Otherwise, <c>false</c>.</returns>
public override bool TryGetFrame(XRCameraParams cameraParams, out XRCameraFrame cameraFrame)
{
    return NativeApi.UnityARKit_Camera_TryGetFrame(cameraParams, out cameraFrame);
}
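Application code rarely calls this override directly; it is typically reached through the public XRCameraSubsystem.TryGetLatestFrame wrapper. A sketch, where mainCamera and cameraSubsystem are assumed references:

var cameraParams = new XRCameraParams
{
    zNear = mainCamera.nearClipPlane,
    zFar = mainCamera.farClipPlane,
    screenWidth = Screen.width,
    screenHeight = Screen.height,
    screenOrientation = Screen.orientation
};

if (cameraSubsystem.TryGetLatestFrame(cameraParams, out XRCameraFrame frame) && frame.hasTimestamp)
{
    Debug.Log($"ARKit frame timestamp: {frame.timestampNs} ns");
}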
public override bool TryGetFrame(XRCameraParams cameraParams, out XRCameraFrame cameraFrame)
{
    var timestamp = CameraApi.timestamp;
    if (this.cameraParams != cameraParams || this.timestamp != timestamp ||
        this.screenSize != CameraApi.screenSize || this.screenOrientation != CameraApi.screenOrientation)
    {
        try
        {
            var result = new XRCameraFrameMock();

            if (CameraApi.timestamp.HasValue)
            {
                result.m_TimestampNs = CameraApi.timestamp.Value;
                result.m_Properties |= XRCameraFrameProperties.Timestamp;
            }

            if (CameraApi.averageBrightness.HasValue)
            {
                result.m_AverageBrightness = CameraApi.averageBrightness.Value;
                result.m_Properties |= XRCameraFrameProperties.AverageBrightness;
            }

            if (CameraApi.averageColorTemperature.HasValue)
            {
                result.m_AverageColorTemperature = CameraApi.averageColorTemperature.Value;
                result.m_Properties |= XRCameraFrameProperties.AverageColorTemperature;
            }

            if (CameraApi.colorCorrection.HasValue)
            {
                result.m_ColorCorrection = CameraApi.colorCorrection.Value;
                result.m_Properties |= XRCameraFrameProperties.ColorCorrection;
            }

            if (CameraApi.projectionMatrix.HasValue)
            {
                // Letterbox or pillarbox the source projection to the target aspect.
                Matrix4x4 screenMatrix = Matrix4x4.identity;
                if (CameraApi.screenSize.HasValue)
                {
                    var sourceScreenSize = CameraApi.screenSize.Value;
                    var sourceAspect = sourceScreenSize.x / sourceScreenSize.y;
                    var screenAspect = cameraParams.screenWidth / cameraParams.screenHeight;
                    if (sourceAspect < screenAspect)
                    {
                        screenMatrix.m00 = sourceAspect / screenAspect;
                    }
                    else
                    {
                        screenMatrix.m11 = screenAspect / sourceAspect;
                    }
                }

                result.m_ProjectionMatrix = screenMatrix * CameraApi.projectionMatrix.Value;
                result.m_Properties |= XRCameraFrameProperties.ProjectionMatrix;
            }

            if (CameraApi.displayMatrix.HasValue)
            {
                result.m_DisplayMatrix = CameraApi.displayMatrix.Value;
                result.m_Properties |= XRCameraFrameProperties.DisplayMatrix;
            }

            result.m_TrackingState = TrackingState.Tracking;
            result.m_NativePtr = IntPtr.Zero;
            result.Convert(out cameraFrame);
            return true;
        }
        finally
        {
            this.timestamp = timestamp;
            this.cameraParams = cameraParams;
            this.screenSize = CameraApi.screenSize;
            this.screenOrientation = CameraApi.screenOrientation;
        }
    }

    cameraFrame = default;
    return false;
}
public override bool TryGetFrame(XRCameraParams cameraParams, out UnityEngine.XR.ARSubsystems.XRCameraFrame cameraFrame)
{
    var frame = new XRCameraFrame();
    XRCameraFrameProperties properties = 0;

    if (m_LastLightEstimation.m_AmbientBrightness.HasValue)
    {
        frame.AverageBrightness = m_LastLightEstimation.m_AmbientBrightness.Value;
        properties |= XRCameraFrameProperties.AverageBrightness;
    }

    if (m_LastLightEstimation.m_AmbientColorTemperature.HasValue)
    {
        frame.AverageColorTemperature = m_LastLightEstimation.m_AmbientColorTemperature.Value;
        properties |= XRCameraFrameProperties.AverageColorTemperature;
    }

    if (m_LastLightEstimation.m_ColorCorrection.HasValue)
    {
        frame.ColorCorrection = m_LastLightEstimation.m_ColorCorrection.Value;
        properties |= XRCameraFrameProperties.ColorCorrection;
    }

#if ARSUBSYSTEMS_3_OR_NEWER
    if (m_LastLightEstimation.m_AmbientIntensityInLumens.HasValue)
    {
        frame.AverageIntensityInLumens = m_LastLightEstimation.m_AmbientIntensityInLumens.Value;
        properties |= XRCameraFrameProperties.AverageIntensityInLumens;
    }
#endif

#if ARSUBSYSTEMS_4_OR_NEWER
    if (m_LastLightEstimation.m_MainLightColor.HasValue)
    {
        frame.MainLightColor = m_LastLightEstimation.m_MainLightColor.Value;
        properties |= XRCameraFrameProperties.MainLightColor;
    }

    if (m_LastLightEstimation.m_MainLightDirection.HasValue)
    {
        frame.MainLightDirection = m_LastLightEstimation.m_MainLightDirection.Value;
        properties |= XRCameraFrameProperties.MainLightDirection;
    }

    if (m_LastLightEstimation.m_MainLightIntensityLumens.HasValue)
    {
        frame.MainLightIntensityLumens = m_LastLightEstimation.m_MainLightIntensityLumens.Value;
        properties |= XRCameraFrameProperties.MainLightIntensityLumens;
    }

    if (m_LastLightEstimation.m_SphericalHarmonics.HasValue)
    {
        frame.AmbientSphericalHarmonics = m_LastLightEstimation.m_SphericalHarmonics.Value;
        properties |= XRCameraFrameProperties.AmbientSphericalHarmonics;
    }
#endif

    frame.Properties = properties;

    // Reinterpret this provider's XRCameraFrame as the ARSubsystems XRCameraFrame via an overlapping union.
    var union = new XRCameraFrameUnion { m_OurXRCameraFrame = frame };
    cameraFrame = union.m_TheirXRCameraFrame;
    return true;
}
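The XRCameraFrameUnion used above is the standard C# overlap idiom for reinterpreting one blittable struct as another; a sketch consistent with the field names in the snippet:

[StructLayout(LayoutKind.Explicit)]
struct XRCameraFrameUnion
{
    [FieldOffset(0)]
    public XRCameraFrame m_OurXRCameraFrame;

    [FieldOffset(0)]
    public UnityEngine.XR.ARSubsystems.XRCameraFrame m_TheirXRCameraFrame;
}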
public override bool TryGetFrame(XRCameraParams cameraParams, out XRCameraFrame cameraFrame)
{
    cameraFrame = default(XRCameraFrame);
    return false;
}