/**
 * Static method that returns the AR capabilities available to the device, e.g. position tracking
 * and surface estimation.
 */
public static XRCapabilities GetDeviceCapabilities() {
  XRCapabilities.PositionTracking position = XRCapabilities.PositionTracking.UNSPECIFIED;
  XRCapabilities.SurfaceEstimation surface = XRCapabilities.SurfaceEstimation.UNSPECIFIED;
  XRCapabilities.TargetImageDetection imageDetect = XRCapabilities.TargetImageDetection.UNSPECIFIED;

  XREnvironment.Reader env = XRNativeBridge.GetXREnvironment();

  switch (env.getCapabilities().getPositionTracking()) {
    case c8.XRCapabilities.PositionalTrackingKind.ROTATION_AND_POSITION:
      position = XRCapabilities.PositionTracking.ROTATION_AND_POSITION;
      break;
    case c8.XRCapabilities.PositionalTrackingKind.ROTATION_AND_POSITION_NO_SCALE:
      position = XRCapabilities.PositionTracking.ROTATION_AND_POSITION_NO_SCALE;
      break;
    default:
      position = XRCapabilities.PositionTracking.UNSPECIFIED;
      break;
  }

  switch (env.getCapabilities().getSurfaceEstimation()) {
    case c8.XRCapabilities.SurfaceEstimationKind.FIXED_SURFACES:
      surface = XRCapabilities.SurfaceEstimation.FIXED_SURFACES;
      break;
    case c8.XRCapabilities.SurfaceEstimationKind.HORIZONTAL_ONLY:
      surface = XRCapabilities.SurfaceEstimation.HORIZONTAL_ONLY;
      break;
    case c8.XRCapabilities.SurfaceEstimationKind.HORIZONTAL_AND_VERTICAL:
      surface = XRCapabilities.SurfaceEstimation.HORIZONTAL_AND_VERTICAL;
      break;
    default:
      surface = XRCapabilities.SurfaceEstimation.UNSPECIFIED;
      break;
  }

  switch (env.getCapabilities().getTargetImageDetection()) {
    case c8.XRCapabilities.TargetImageDetectionKind.UNSUPPORTED:
      imageDetect = XRCapabilities.TargetImageDetection.UNSUPPORTED;
      break;
    case c8.XRCapabilities.TargetImageDetectionKind.FIXED_SIZE_IMAGE_TARGET:
      imageDetect = XRCapabilities.TargetImageDetection.FIXED_SIZE_IMAGE_TARGET;
      break;
    default:
      imageDetect = XRCapabilities.TargetImageDetection.UNSPECIFIED;
      break;
  }

  return new XRCapabilities(position, surface, imageDetect);
}
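// Usage sketch (illustrative, not part of this file): a caller could query the device once
// and gate AR features on the result. Only the XRCapabilities enum values above are taken
// from this file; the accessor name used below is an assumption.
//
//   XRCapabilities caps = GetDeviceCapabilities();
//   if (caps.GetSurfaceEstimation() ==                          // hypothetical accessor
//       XRCapabilities.SurfaceEstimation.HORIZONTAL_AND_VERTICAL) {
//     // Vertical surfaces are available, e.g. enable wall placement.
//   }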
// Selects the engine's special execution mode from the remoteOnly / disableNativeAr flags,
// commits the new configuration to the native bridge, and refreshes the cached XR environment
// and camera projection.
private void SetEngineMode() {
  XREngineConfiguration.SpecialExecutionMode engineMode =
      XREngineConfiguration.SpecialExecutionMode.NORMAL;
  if (remoteOnly) {
    engineMode = XREngineConfiguration.SpecialExecutionMode.REMOTE_ONLY;
  } else if (disableNativeAr) {
    engineMode = XREngineConfiguration.SpecialExecutionMode.DISABLE_NATIVE_AR_ENGINE;
  }

  var configMessageBuilder = new MessageBuilder();
  var config = configMessageBuilder.initRoot(XRConfiguration.factory);
  config.getEngineConfiguration().setMode(engineMode);
  bridge.CommitConfiguration(configMessageBuilder);

  xrEnvironment = XRNativeBridge.GetXREnvironment();
  if (cam != null) {
    UpdateCameraProjectionMatrix(cam, origin, facing, scale);
  }
}
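// Call-order sketch (assumption, not shown in this file): the execution-mode flags are plain
// fields, so switching the engine into remote-only mode at runtime would look like the
// following, reusing only names that appear above.
//
//   remoteOnly = true;        // route AR processing through the remote bridge
//   disableNativeAr = false;
//   SetEngineMode();          // rebuilds the XRConfiguration message and commits it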
// Awake is called first at app startup.
void Awake() {
  running = false;
  bridge = new XRNativeBridge(enableRemote);
  bridge.Create(GetRenderingSystem());
  if (EnableRemote()) {
    editorBridge = new XREditorBridge();
  }
  xrEnvironment = XRNativeBridge.GetXREnvironment();
  Application.targetFrameRate = 60;
}
void Update() {
  if (!explicitlyPaused) {
    RunIfPaused();
  }
  updateNumber++;

  if (EnableRemote()) {
    bridge.SetEditorAppInfo(editorBridge.EditorAppInfo());
    bool firstConnect = false;
    if (!remoteConnected) {
      remoteConnected = bridge.IsRemoteConnected();
      firstConnect = remoteConnected;
    }
    if (remoteConnected) {
      var remoteData = bridge.GetXRRemote();
      if (firstConnect) {
        editorBridge.SetPlayerAspect(remoteData);
        xrEnvironment = XRNativeBridge.GetXREnvironment();
      }
      editorBridge.SendDeviceInfo(remoteData);
      editorBridge.Update();
      // Send camera aspect info to the editor on every frame, in case the preview size changes.
      ConfigureXR();
    } else {
      editorBridge.CheckADB();
    }
  }

  var r = GetCurrentReality();
  // Ignore snapshots with the same or an older capture time than the last one processed.
  if (lastRealityMicros >= r.getEventId().getEventTimeMicros()) {
    return;
  }
  lastRealityMicros = r.getEventId().getEventTimeMicros();
}
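// The guard at the end of Update() drops stale snapshots by comparing capture timestamps.
// A minimal standalone sketch of the same pattern (illustrative only; the helper below is
// hypothetical and not part of this file):
//
//   long lastMicros = 0;
//   bool IsNewEvent(long eventMicros) {
//     if (eventMicros <= lastMicros) {
//       return false;         // same or older snapshot, already processed
//     }
//     lastMicros = eventMicros;
//     return true;
//   }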