private void OnGetInitPoseCallback(string errorJson, string resultJson)
{
    // A non-null error document means the native command failed.
    if (errorJson != null)
    {
        Debug.LogWarning("[vlUnitySDK] OnGetInitPoseCallback: Failed to get init pose");
        return;
    }

    VLModelTrackerCommands.GetInitPoseResult poseResult =
        VLJsonUtility.FromJson<VLModelTrackerCommands.GetInitPoseResult>(
            resultJson);

    // Convert the VisionLib pose (t, q) into Unity camera coordinates.
    Vector3 position;
    Quaternion orientation;
    VLUnityCameraHelper.VLPoseToCamera(
        new Vector3(poseResult.t[0], poseResult.t[1], poseResult.t[2]),
        new Quaternion(
            poseResult.q[0], poseResult.q[1],
            poseResult.q[2], poseResult.q[3]),
        out position,
        out orientation,
        this.workerBehaviour.flipCoordinateSystemHandedness);

    if (this.initCamera == null)
    {
        Debug.LogWarning("[vlUnitySDK] OnGetInitPoseCallback: initCamera is null");
        return;
    }

    // Apply the pose to the init camera and remember it so it can be
    // restored later.
    this.initCamera.transform.position = position;
    this.initCamera.transform.rotation = orientation;
    this.originalPosition = position;
    this.originalOrientation = orientation;
    this.ready = true;
}
/// <summary>
/// Retrieves the device info object from the AbstractApplication.
/// </summary>
/// <returns>
/// <c>VLDeviceInfo</c>, if the device info was acquired successfully;
/// <c>null</c> otherwise.
/// </returns>
/// <exception cref="ObjectDisposedException">
/// Thrown if this wrapper was already disposed.
/// </exception>
public VLDeviceInfo GetDeviceInfo()
{
    if (this.disposed)
    {
        throw new ObjectDisposedException("VLAbstractApplicationWrapper");
    }

    StringBuilder sb = new StringBuilder(65536);
    // NOTE(review): the native call is given Capacity + 1. Confirm the
    // native side interprets this size as excluding the terminating
    // zero; otherwise it could write one character past the buffer.
    if (!vlAbstractApplicationWrapper_GetDeviceInfo(this.handle, sb,
        Convert.ToUInt32(sb.Capacity + 1)))
    {
        Debug.LogError("No valid device info returned...");
        return null;
    }

    string deviceInfoJson = sb.ToString();
    Debug.Log("[vlUnitySDK] DeviceInfoJson: " + deviceInfoJson);
    return VLJsonUtility.FromJson<VLDeviceInfo>(deviceInfoJson);
}
/// <summary>
/// Function for obtaining all poses of a WorkSpace definition directly from vlSDK.
/// </summary>
/// <returns>
/// Array of Workspace.Transform in the vlSDK coordinate system, which represent all
/// camera poses described by this WorkSpace.Definition
/// </returns>
public Transform[] GetCameraTransforms()
{
    int transformsSize = 0;
    IntPtr transforms = vlSDKUtil_getCameraTransformsFromWorkspaceDefinition(
        VLJsonUtility.ToJson(this), out transformsSize);
    if (transforms == IntPtr.Zero)
    {
        return new WorkSpace.Transform[0];
    }

    // Each transform is serialized as 7 floats: translation (x, y, z)
    // followed by rotation quaternion (x, y, z, w).
    float[] transformsVector = new float[transformsSize * 7];
    try
    {
        Marshal.Copy(transforms, transformsVector, 0, transformsSize * 7);
    }
    finally
    {
        // Free the natively allocated buffer even if the copy fails, so
        // the native memory is never leaked.
        Marshal.FreeHGlobal(transforms);
    }

    WorkSpace.Transform[] result = new WorkSpace.Transform[transformsSize];
    for (int i = 0; i < transformsSize; i++)
    {
        result[i] = new WorkSpace.Transform(
            new Vector3(
                transformsVector[i * 7 + 0],
                transformsVector[i * 7 + 1],
                transformsVector[i * 7 + 2]),
            new Quaternion(
                transformsVector[i * 7 + 3],
                transformsVector[i * 7 + 4],
                transformsVector[i * 7 + 5],
                transformsVector[i * 7 + 6]));
    }
    return result;
}
private void OnGetInitPoseCallback(string errorJson, string resultJson)
{
    // A non-null error document means the native command failed.
    if (errorJson != null)
    {
        Debug.LogWarning("[vlUnitySDK] OnGetInitPoseCallback: Failed to get init pose");
        return;
    }

    VLModelTrackerCommands.GetInitPoseResult poseResult =
        VLJsonUtility.FromJson<VLModelTrackerCommands.GetInitPoseResult>(
            resultJson);

    // Convert the VisionLib pose (t, q) into Unity camera coordinates.
    Vector3 position;
    Quaternion orientation;
    VLUnityCameraHelper.VLPoseToCamera(
        new Vector3(poseResult.t[0], poseResult.t[1], poseResult.t[2]),
        new Quaternion(
            poseResult.q[0], poseResult.q[1],
            poseResult.q[2], poseResult.q[3]),
        out position,
        out orientation);

    // Store the pose; the reset flag makes the next Update call apply it.
    this.originalPosition = position;
    this.originalOrientation = orientation;
    this.initPoseReady = true;
    this.reset = true; // This will set the new pose during the next Update
                       // call
}
/// <summary>
/// Function for obtaining all positions of a geometry definition directly from vlSDK.
/// </summary>
/// <returns>Array of Unity coordinates, which are described by the given Geometry</returns>
public Vector3[] GetCameraPositions()
{
    int positionsSize = 0;
    IntPtr positions = vlSDKUtil_getCameraPositionsFromGeometry(
        VLJsonUtility.ToJson(this), out positionsSize);
    if (positions == IntPtr.Zero)
    {
        return new Vector3[0];
    }

    // Each position is serialized as 3 consecutive floats (x, y, z).
    float[] positionsVector = new float[positionsSize * 3];
    try
    {
        Marshal.Copy(positions, positionsVector, 0, positionsSize * 3);
    }
    finally
    {
        // Free the natively allocated buffer even if the copy fails, so
        // the native memory is never leaked.
        Marshal.FreeHGlobal(positions);
    }

    Vector3[] cameraPositions = new Vector3[positionsSize];
    for (int i = 0; i < positionsSize; i++)
    {
        // Convert each VisionLib position into the Unity coordinate system
        cameraPositions[i] = VLUnityCameraHelper.VLPoseToUnityPose(
            new Vector3(
                positionsVector[i * 3],
                positionsVector[i * 3 + 1],
                positionsVector[i * 3 + 2]));
    }
    return cameraPositions;
}
// Acquires the HoloLens spatial coordinate system and hands it to the
// VisionLib worker as the global coordinate system.
private void SetGlobalCoordinateSystemInVisionLib()
{
    // Release the previously held COM reference first so it isn't leaked
    // when a new coordinate system pointer is acquired.
    if (this.globalCoordinateSystem != IntPtr.Zero)
    {
        Marshal.Release(this.globalCoordinateSystem);
    }
    this.globalCoordinateSystem =
        WorldManager.GetNativeISpatialCoordinateSystemPtr();
    if (this.globalCoordinateSystem == IntPtr.Zero)
    {
        Debug.LogError("[vlUnitySDK] Failed to retrieve spatial coordinate system");
        return;
    }
    if (!this.InitWorkerReference())
    {
        Debug.LogError("[vlUnitySDK] Failed to get the VLWorker from VLHoloLensinitCameraBehaviour for SetGlobalCoordinateSystemInVisionLib");
        return;
    }
    // Push the coordinate system pointer to the tracking thread. The
    // pointer is passed as the command's binary payload (size 0) with no
    // completion callback.
    this.worker.PushJsonAndBinaryCommand(
        VLJsonUtility.ToJson(
            new VLHoloLensModelTrackerCommands.
            SetGlobalCoordinateSystemCommandDescription()),
        this.globalCoordinateSystem,
        0,
        null,
        IntPtr.Zero);
}
/// <summary>
/// Add all (sub)meshes to the tracking system. If a sub mesh has its own
/// VLModelTrackableBehaviour, it will not be added, but this behaviour should
/// manage the relevant submeshes.
/// </summary>
/// <param name="success">
/// If <c>false</c>, the update is skipped entirely.
/// </param>
public void UpdateModel(bool success = true)
{
    if (success && this.modelTrackerBehaviour.workerBehaviour)
    {
        VLWorker worker = this.modelTrackerBehaviour.workerBehaviour.GetWorker();
        if (worker == null)
        {
            Debug.Log("[vlUnitySDK] VLModelTrackableBehaviour.UpdateModel: Worker is not correctly initialized\n");
            return;
        }

        AddModelDataCommandDescription command =
            this.GenerateModelDataDescriptor(true);
        byte[] binaryData =
            this.GenerateBinaryData(this.modelData, this.binaryOffset);
        // Pin the managed buffer so the native tracking thread can read it
        // after this method returns; the handle is queued and released
        // later (see FreeNextBinaryMemoryBlock, called from the add-model
        // callback).
        GCHandle binaryDataHandle =
            GCHandle.Alloc(binaryData, GCHandleType.Pinned);
        IntPtr data = binaryDataHandle.AddrOfPinnedObject();
        UInt32 dataLength = Convert.ToUInt32(binaryData.Length);
        gcHandleQueue.Enqueue(binaryDataHandle);

        // Asynchronously push the model description plus the pinned binary
        // payload; the callback delegate receives this object back through
        // the pinned gcHandle pointer.
        worker.PushJsonAndBinaryCommand(
            VLJsonUtility.ToJson(command),
            data,
            dataLength,
            dispatchAddModelCallbackDelegate,
            GCHandle.ToIntPtr(this.gcHandle));
    }
}
// Deserializes a performance-info JSON document and forwards it to any
// registered OnPerformanceInfo listeners.
private void PerformanceInfoHandler(string performanceInfoJson)
{
    VLPerformanceInfo info =
        VLJsonUtility.FromJson<VLPerformanceInfo>(performanceInfoJson);
    if (OnPerformanceInfo != null)
    {
        OnPerformanceInfo(info);
    }
}
// Deserializes a tracking-state JSON document and forwards it to any
// registered OnTrackingStates listeners. Nothing is emitted if the
// document could not be parsed.
private void TrackingStateHandler(string trackingStateJson)
{
    VLTrackingState trackingState =
        VLJsonUtility.FromJson<VLTrackingState>(trackingStateJson);
    if (trackingState == null)
    {
        return;
    }
    if (OnTrackingStates != null)
    {
        OnTrackingStates(trackingState);
    }
}
// Callback for the asynchronous get-attribute command: parses the result
// and, if the value changed, fires the typed value-changed events.
private void OnGetAttributeCallback(string errorJson, string resultJson)
{
    this.getting = false;

    // The callback might occur after the behaviour was disabled
    if (!this.enabled)
    {
        return;
    }

    if (errorJson != null)
    {
        CommandError commandError =
            VLJsonUtility.FromJson<CommandError>(errorJson);
        Debug.LogWarning("[vlUnitySDK] OnGetAttributeCallback: " +
            commandError.message);
        return;
    }

    GetAttributeResult attributeResult =
        VLJsonUtility.FromJson<GetAttributeResult>(resultJson);
    string newValue = attributeResult.value;

    // Only notify listeners when the value actually changed.
    if (this.parameterValue == newValue)
    {
        return;
    }
    this.parameterValue = newValue;

    switch (this.internalParameterType)
    {
    case ParameterType.String:
        this.stringValueChangedEvent.Invoke(newValue);
        break;
    case ParameterType.Int:
        // Integer parameters notify both the int and the float listeners.
        this.intValueChangedEvent.Invoke(
            Convert.ToInt32(newValue, CultureInfo.InvariantCulture));
        this.floatValueChangedEvent.Invoke(
            Convert.ToSingle(newValue, CultureInfo.InvariantCulture));
        break;
    case ParameterType.Float:
        this.floatValueChangedEvent.Invoke(
            Convert.ToSingle(newValue, CultureInfo.InvariantCulture));
        break;
    case ParameterType.Bool:
        this.boolValueChangedEvent.Invoke(
            VLRuntimeParameterBehaviour.ToBoolean(newValue));
        break;
    default:
        Debug.LogWarning("[vlUnitySDK] OnGetAttributeCallback: Unknown parameter type");
        break;
    }
}
// Recomputes the serialized init pose from the given Unity camera.
private void UpdateInitPose(Camera cam)
{
    // Derive the VisionLib transformation (this.t / this.q) from the
    // Unity camera.
    VLUnityCameraHelper.CameraToVLPose(
        cam, this.rotCamera, out this.t, out this.q);

    // Store the pose and cache its JSON representation.
    this.initPose.Set(t, q);
    this.initPoseString = VLJsonUtility.ToJson(this.initPose, this.prettyPrint);
}
/// <summary>
/// Enqueues a command for the tracking thread.
/// </summary>
/// <remarks>
/// <para>
/// The command gets processed asynchronously by the tracking thread and
/// a callback will called once after the processing has finished.
/// </para>
/// <para>
/// The different commands are defined inside the
/// <see cref="VLWorkerCommands"/> namespace.
/// </para>
/// </remarks>
/// <param name="cmd">
/// The command object.
/// </param>
/// <param name="callback">
/// Callback, which will be called inside <see cref="ProcessCallbacks"/>
/// after the command was processed.
/// </param>
/// <param name="clientData">
/// The callback function will be called with the given pointer value.
/// </param>
/// <returns>
/// <c>true</c>, if the command was enqueue successfully;
/// <c>false</c> otherwise.
/// </returns>
/// <exception cref="ObjectDisposedException">
/// Thrown if this worker was already disposed.
/// </exception>
public bool PushCommand(VLWorkerCommands.CommandBase cmd,
    JsonStringCallback callback, IntPtr clientData)
{
    if (this.disposed)
    {
        throw new ObjectDisposedException("VLWorker");
    }

    // Serialize the command to JSON and hand it to the native worker.
    string commandJson = VLJsonUtility.ToJson(cmd);
    return vlWorker_PushJsonCommand(
        this.handle, commandJson, callback, clientData);
}
// Callback for camera calibration answers: forwards the calibration to
// listeners once the "getResults" state change is reported.
private void CameraCalibrationDataHandler(string errorJson, string resultJson)
{
    if (OnCameraCalibrationData == null)
    {
        return;
    }

    VLCameraCalibrationAnswer calib =
        VLJsonUtility.FromJson<VLCameraCalibrationAnswer>(resultJson);
    // Also guard stateChange: reading calib.stateChange.command on a
    // partial answer would otherwise throw a NullReferenceException.
    if (calib != null && calib.calibration != null &&
        calib.stateChange != null &&
        calib.stateChange.command == "getResults")
    {
        OnCameraCalibrationData(calib.calibration);
    }
}
// Callback for the get-model-properties command: forwards the parsed
// model info (or null when no result is available) to listeners.
private void GetModelPropertiesHandler(string errorJson, string resultJson)
{
    if (OnGetModelProperties == null)
    {
        return;
    }

    VLModelPropertiesStructure modelProperties = null;
    if (resultJson != null)
    {
        modelProperties =
            VLJsonUtility.FromJson<VLModelPropertiesStructure>(resultJson);
    }
    // Guard against a failed deserialization as well: previously a null
    // parse result would have caused a NullReferenceException here.
    // Listeners receive null in every "no data" case.
    OnGetModelProperties(
        modelProperties != null ? modelProperties.info : null);
}
// Callback for the create-tracker command: reports initialization issues
// to listeners and, on success, starts the tracking loop.
private void CreateTrackerHandler(string errorJson, string resultJson)
{
    bool hasError = (errorJson != null);

    // errorJson / resultJson each carry a VLTrackingIssues document:
    // errors in the former, warnings in the latter.
    if (OnTrackerInitializedWithIssues != null)
    {
        VLTrackingIssues errorIssues = null;
        VLTrackingIssues warningIssues = null;
        if (errorJson != null)
        {
            errorIssues = VLJsonUtility.FromJson<VLTrackingIssues>(errorJson);
        }
        if (resultJson != null)
        {
            warningIssues = VLJsonUtility.FromJson<VLTrackingIssues>(resultJson);
        }
        OnTrackerInitializedWithIssues(errorIssues, warningIssues);
    }
    if (OnTrackerInitialized != null)
    {
        OnTrackerInitialized(errorJson == null);
    }

    // Push the RunTracking command after calling the OnTrackerInitialized
    // event in order to give the user the chance to push commands which
    // will then be executed before the tracking is running.
    // only run if no error has occured...
    if (!hasError)
    {
        this.worker.PushCommand(
            new RunTrackingCmd(),
            DispatchRunTrackingCallback,
            GCHandle.ToIntPtr(this.gcHandle));
    }
    else
    {
        StopTracking();
    }
}
/// <summary>
/// Returns the current transformation of the Camera component in
/// VisionLib coordinates as JSON string.
/// </summary>
/// <returns>
/// JSON string with initial pose in VisionLib coordinates. An empty
/// string is returned if the worker reference is not available.
/// </returns>
public string GetInitPoseJson()
{
    if (!this.InitWorkerReference())
    {
        return "";
    }

    // Get the VisionLib transformation from the camera component
    Vector4 t;
    Quaternion q;
    VLUnityCameraHelper.CameraToVLPose(
        this.cam, this.rotCamera, out t, out q,
        this.workerBehaviour.flipCoordinateSystemHandedness);

    // Convert the transformation into JSON
    InitPose initPoseParam =
        new InitPose(t.x, t.y, t.z, q.x, q.y, q.z, q.w);
    return VLJsonUtility.ToJson(initPoseParam, this.prettyPrint);
}
// Static entry point invoked from native code after an add-model command
// finished. Recovers the VLModelTrackableBehaviour instance from the
// pinned clientData handle, releases the pinned model buffer and forwards
// the deserialization result.
private static void DispatchAddModelCallback(
    string description, string data, System.UInt32 dataSize, IntPtr clientData)
{
    if (description == null)
    {
        return;
    }
    try
    {
        VLModelDeserializationStructure answerStructure =
            VLJsonUtility.FromJson<VLModelDeserializationStructure>(description);
        VLModelTrackableBehaviour trackable =
            (VLModelTrackableBehaviour)GCHandle.FromIntPtr(clientData).Target;
        // free data previously allocated/pinned
        trackable.FreeNextBinaryMemoryBlock();
        if (!String.IsNullOrEmpty(answerStructure.error))
        {
            Debug.Log("[vlUnitySDK] VLModelTrackableBehaviour.DispatchAddModelCallback: Error occurred while adding a model to the tracking system: " +
                answerStructure.error);
        }
        if (answerStructure.result != null)
        {
            trackable.OnUpdateDeserializationResult(answerStructure.result);
        }
    }
    catch (Exception e) // Catch all exceptions, because this is a callback
                        // invoked from native code
    {
        Debug.LogError("[vlUnitySDK] " + e.GetType().Name + ": " + e.Message);
    }
}
/// <summary>
/// Updates the transformation of all (sub)meshes in the tracking system.
/// It has to be called after each update in a transform which is relevant
/// for the location of a related mesh.
/// </summary>
/// <param name="useAllChildNodes">
/// If useAllChildNodes is true, this will update all locations of
/// submeshes, even if they have their own VLModelTrackableBehaviour. It does
/// not update the modelDescriptions of this behaviour.
/// </param>
public void UpdateTransformation(bool useAllChildNodes)
{
    if (!this.modelTrackerBehaviour.HasWorkerReference())
    {
        return;
    }

    VLWorker worker = this.modelTrackerBehaviour.workerBehaviour.GetWorker();
    if (worker == null)
    {
        Debug.Log("[vlUnitySDK] VLModelTrackableBehaviour: Worker is not correctly initialized\n");
        return;
    }

    // Only the transform data is sent; no binary payload and no callback
    // are needed for a pure transformation update.
    AddModelDataCommandDescription transformCommand =
        this.GenerateModelDataDescriptor(false, useAllChildNodes);
    worker.PushJsonAndBinaryCommand(
        VLJsonUtility.ToJson(transformCommand),
        IntPtr.Zero,
        0,
        null,
        IntPtr.Zero);
}
/// <summary>
/// Writes this WorkSpace.Configuration into the specified file.
/// </summary>
/// <param name="fileName">Path of the file to write the data in.</param>
/// <remarks>
/// <para>
/// It's possible to use vlSDK file schemes (e.g. local_storage_dir) here.
/// </para>
/// </remarks>
public void WriteToFile(string fileName)
{
    // Serialize this configuration and hand it to the vlSDK file writer.
    string configurationJson = VLJsonUtility.ToJson(this);
    VLUnitySdk.Set(fileName, configurationJson, "");
}