/// <summary>
///  Retrieves every camera position described by this Geometry definition
///  directly from the vlSDK.
/// </summary>
/// <returns>
///  Array of positions converted into Unity coordinates. Empty if the
///  native call failed.
/// </returns>
public Vector3[] GetCameraPositions()
{
    int count = 0;
    IntPtr nativeBuffer = vlSDKUtil_getCameraPositionsFromGeometry(
        VLJsonUtility.ToJson(this), out count);
    if (nativeBuffer == IntPtr.Zero)
    {
        return new Vector3[0];
    }

    // Copy the raw (x, y, z) float triples out of native memory, then
    // release the buffer handed over by the native side.
    float[] rawValues = new float[count * 3];
    Marshal.Copy(nativeBuffer, rawValues, 0, count * 3);
    Marshal.FreeHGlobal(nativeBuffer);

    // Convert each triple from the VisionLib coordinate system into a
    // Unity-space position.
    Vector3[] cameraPositions = new Vector3[count];
    for (int index = 0; index < count; ++index)
    {
        Vector3 vlPosition = new Vector3(
            rawValues[index * 3],
            rawValues[index * 3 + 1],
            rawValues[index * 3 + 2]);
        cameraPositions[index] = VLUnityCameraHelper.VLPoseToUnityPose(vlPosition);
    }
    return cameraPositions;
}
/// <summary>
///  Retrieves every camera pose described by this WorkSpace.Definition
///  directly from the vlSDK.
/// </summary>
/// <returns>
///  Array of Workspace.Transform in the vlSDK coordinate system, which
///  represent all camera poses of this WorkSpace.Definition. Empty if the
///  native call failed.
/// </returns>
public Transform[] GetCameraTransforms()
{
    int count = 0;
    IntPtr nativeBuffer = vlSDKUtil_getCameraTransformsFromWorkspaceDefinition(
        VLJsonUtility.ToJson(this), out count);
    if (nativeBuffer == IntPtr.Zero)
    {
        return new WorkSpace.Transform[0];
    }

    // Each pose is laid out as 7 floats: translation (x, y, z) followed by
    // rotation quaternion (x, y, z, w). Copy them all, then free the
    // native buffer.
    float[] rawValues = new float[count * 7];
    Marshal.Copy(nativeBuffer, rawValues, 0, count * 7);
    Marshal.FreeHGlobal(nativeBuffer);

    WorkSpace.Transform[] cameraTransforms = new WorkSpace.Transform[count];
    for (int index = 0; index < count; ++index)
    {
        int offset = index * 7;
        Vector3 translation = new Vector3(
            rawValues[offset + 0],
            rawValues[offset + 1],
            rawValues[offset + 2]);
        Quaternion rotation = new Quaternion(
            rawValues[offset + 3],
            rawValues[offset + 4],
            rawValues[offset + 5],
            rawValues[offset + 6]);
        cameraTransforms[index] = new WorkSpace.Transform(translation, rotation);
    }
    return cameraTransforms;
}
/// <summary>
///  Add all (sub)meshes to the tracking system. If a sub mesh has its own
///  VLModelTrackableBehaviour, it will not be added, but this behaviour
///  should manage the relevant submeshes.
/// </summary>
public void UpdateModel(bool success = true)
{
    // Bail out early when the previous step failed or no worker behaviour
    // is attached.
    if (!success || !this.modelTrackerBehaviour.workerBehaviour)
    {
        return;
    }

    VLWorker worker = this.modelTrackerBehaviour.workerBehaviour.GetWorker();
    if (worker == null)
    {
        Debug.Log("[vlUnitySDK] VLModelTrackableBehaviour.UpdateModel: Worker is not correctly initialized\n");
        return;
    }

    // Describe the model and serialize its mesh data into one binary blob.
    AddModelDataCommandDescription command = this.GenerateModelDataDescriptor(true);
    byte[] binaryData = this.GenerateBinaryData(this.modelData, this.binaryOffset);

    // Pin the managed buffer so the native side can safely read from it.
    // The handle is queued and released later, once the command has been
    // processed.
    GCHandle binaryDataHandle = GCHandle.Alloc(binaryData, GCHandleType.Pinned);
    IntPtr pinnedData = binaryDataHandle.AddrOfPinnedObject();
    UInt32 pinnedDataLength = Convert.ToUInt32(binaryData.Length);
    gcHandleQueue.Enqueue(binaryDataHandle);

    worker.PushJsonAndBinaryCommand(
        VLJsonUtility.ToJson(command),
        pinnedData,
        pinnedDataLength,
        dispatchAddModelCallbackDelegate,
        GCHandle.ToIntPtr(this.gcHandle));
}
/// <summary>
///  Fetches the current native ISpatialCoordinateSystem pointer and hands
///  it to the VisionLib worker, so tracking can use the global HoloLens
///  coordinate system.
/// </summary>
private void SetGlobalCoordinateSystemInVisionLib()
{
    // Release the COM reference acquired by a previous call and clear the
    // field right away; otherwise a failure in the calls below would leave
    // a stale pointer behind, which a later invocation would release a
    // second time.
    if (this.globalCoordinateSystem != IntPtr.Zero)
    {
        Marshal.Release(this.globalCoordinateSystem);
        this.globalCoordinateSystem = IntPtr.Zero;
    }

    this.globalCoordinateSystem = WorldManager.GetNativeISpatialCoordinateSystemPtr();
    if (this.globalCoordinateSystem == IntPtr.Zero)
    {
        Debug.LogError("[vlUnitySDK] Failed to retrieve spatial coordinate system");
        return;
    }

    if (!this.InitWorkerReference())
    {
        Debug.LogError("[vlUnitySDK] Failed to get the VLWorker from VLHoloLensinitCameraBehaviour for SetGlobalCoordinateSystemInVisionLib");
        return;
    }

    // The coordinate system pointer is passed as the binary payload of the
    // command; the length is 0, since the pointer itself carries the data.
    this.worker.PushJsonAndBinaryCommand(
        VLJsonUtility.ToJson(
            new VLHoloLensModelTrackerCommands.
            SetGlobalCoordinateSystemCommandDescription()),
        this.globalCoordinateSystem,
        0,
        null,
        IntPtr.Zero);
}
/// <summary>
///  Enqueues a command for the tracking thread.
/// </summary>
/// <remarks>
/// <para>
///  The command gets processed asynchronously by the tracking thread and
///  a callback will called once after the processing has finished.
/// </para>
/// <para>
///  The different commands are defined inside the
///  <see cref="VLWorkerCommands"/> namespace.
/// </para>
/// </remarks>
/// <param name="cmd">
///  The command object.
/// </param>
/// <param name="callback">
///  Callback, which will be called inside <see cref="ProcessCallbacks"/>
///  after the command was processed.
/// </param>
/// <param name="clientData">
///  The callback function will be called with the given pointer value.
/// </param>
/// <returns>
///  <c>true</c>, if the command was enqueue successfully;
///  <c>false</c> otherwise.
/// </returns>
public bool PushCommand(VLWorkerCommands.CommandBase cmd,
    JsonStringCallback callback, IntPtr clientData)
{
    if (this.disposed)
    {
        throw new ObjectDisposedException("VLWorker");
    }

    // Serialize the command and forward it to the native worker queue.
    string commandJson = VLJsonUtility.ToJson(cmd);
    return vlWorker_PushJsonCommand(this.handle, commandJson, callback, clientData);
}
/// <summary>
///  Derives the VisionLib init pose from the given Unity camera and caches
///  its JSON representation.
/// </summary>
/// <param name="cam">Unity camera to read the transformation from.</param>
private void UpdateInitPose(Camera cam)
{
    // Translate the Unity camera transformation into VisionLib
    // translation / rotation values.
    VLUnityCameraHelper.CameraToVLPose(
        cam, this.rotCamera, out this.t, out this.q);

    // Store the pose and serialize it to JSON.
    this.initPose.Set(this.t, this.q);
    this.initPoseString = VLJsonUtility.ToJson(this.initPose, this.prettyPrint);
}
/// <summary>
///  Returns the current transformation of the Camera component in
///  VisionLib coordinates as JSON string.
/// </summary>
/// <returns>
///  JSON string with initial pose in VisionLib coordinates; empty string
///  if the worker reference could not be initialized.
/// </returns>
public string GetInitPoseJson()
{
    if (!this.InitWorkerReference())
    {
        return "";
    }

    // Translate the camera component's transformation into VisionLib
    // translation / rotation values.
    Vector4 translation;
    Quaternion rotation;
    VLUnityCameraHelper.CameraToVLPose(
        this.cam, this.rotCamera, out translation, out rotation,
        this.workerBehaviour.flipCoordinateSystemHandedness);

    // Serialize the resulting pose to JSON.
    InitPose param = new InitPose(
        translation.x, translation.y, translation.z,
        rotation.x, rotation.y, rotation.z, rotation.w);
    return VLJsonUtility.ToJson(param, this.prettyPrint);
}
/// <summary>
///  Updates the transformation of all (sub)meshes in the tracking system.
///  It has to be called after each update in a transform which is relevant
///  for the location of a related mesh.
/// </summary>
/// <param name="useAllChildNodes">
///  If useAllChildNodes is true, this will update all locations of
///  submeshes, even if they have their own VLModelTrackableBehaviour. It
///  does not update the modelDescriptions of this behaviour.
/// </param>
public void UpdateTransformation(bool useAllChildNodes)
{
    if (!this.modelTrackerBehaviour.HasWorkerReference())
    {
        return;
    }

    VLWorker worker = this.modelTrackerBehaviour.workerBehaviour.GetWorker();
    if (worker == null)
    {
        Debug.Log("[vlUnitySDK] VLModelTrackableBehaviour: Worker is not correctly initialized\n");
        return;
    }

    // Only the transformations are pushed here — no mesh data is attached,
    // hence the empty binary payload.
    AddModelDataCommandDescription command =
        this.GenerateModelDataDescriptor(false, useAllChildNodes);
    worker.PushJsonAndBinaryCommand(
        VLJsonUtility.ToJson(command), IntPtr.Zero, 0, null, IntPtr.Zero);
}
/// <summary>
///  Writes this WorkSpace.Configuration into the specified file.
/// </summary>
/// <param name="fileName">Path of the file to write the data in.</param>
/// <remarks>
/// <para>
///  It's possible to use vlSDK file schemes (e.g. local_storage_dir) here.
/// </para>
/// </remarks>
public void WriteToFile(string fileName)
{
    // Serialize this configuration and let the vlSDK resolve the target
    // URI (including its custom file schemes).
    string configurationJson = VLJsonUtility.ToJson(this);
    VLUnitySdk.Set(fileName, configurationJson, "");
}