Example #1
    private void OnGetInitPoseCallback(string errorJson, string resultJson)
    {
        if (errorJson != null)
        {
            Debug.LogWarning("[vlUnitySDK] OnGetInitPoseCallback: Failed to get init pose");
            return;
        }

        VLModelTrackerCommands.GetInitPoseResult result =
            VLJsonUtility.FromJson<VLModelTrackerCommands.GetInitPoseResult>(
                resultJson);

        Vector3    position;
        Quaternion orientation;

        VLUnityCameraHelper.VLPoseToCamera(
            new Vector3(result.t[0], result.t[1], result.t[2]),
            new Quaternion(result.q[0], result.q[1], result.q[2], result.q[3]),
            out position, out orientation,
            this.workerBehaviour.flipCoordinateSystemHandedness);

        if (this.initCamera != null)
        {
            this.initCamera.transform.position = position;
            this.initCamera.transform.rotation = orientation;
            this.originalPosition    = position;
            this.originalOrientation = orientation;
            this.ready = true;
        }
        else
        {
            Debug.LogWarning("[vlUnitySDK] OnGetInitPoseCallback: initCamera is null");
        }
    }
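
Stripped of the callback plumbing, the conversion boils down to a single helper call. A minimal sketch with illustrative values (`someCamera` stands in for whatever Camera should receive the pose):

    // Convert a VisionLib pose (translation t, rotation q) into a Unity
    // camera pose; the values and someCamera are illustrative.
    Vector3 position;
    Quaternion orientation;

    VLUnityCameraHelper.VLPoseToCamera(
        new Vector3(0.0f, 0.0f, 1.0f),          // t from the vlSDK result
        new Quaternion(0.0f, 0.0f, 0.0f, 1.0f), // q from the vlSDK result
        out position, out orientation,
        VLUnityCameraHelper.FlipCoordinateSystemHandedness.Automatically);

    someCamera.transform.SetPositionAndRotation(position, orientation);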
Example #2
 private static VIS.WorkSpace.Geometry CreateVLGeometry(VLGeometry creator,
                                                        VLUnityCameraHelper.FlipCoordinateSystemHandedness fcsHandedness)
 {
     return(creator.CreateVLGeometry(VLUnityCameraHelper.CreateLocalVLTransform(
                                         creator.gameObject,
                                         fcsHandedness)));
 }
Example #3
            /// <summary>
            /// Obtains all camera positions of a geometry definition directly
            /// from the vlSDK.
            /// </summary>
            /// <returns>Array of Unity coordinates described by the given geometry</returns>
            public Vector3[] GetCameraPositions()
            {
                int    positionsSize = 0;
                IntPtr positions     =
                    vlSDKUtil_getCameraPositionsFromGeometry(VLJsonUtility.ToJson(this), out positionsSize);

                if (positions == IntPtr.Zero)
                {
                    return(new Vector3[0]);
                }

                float[] positionsVector = new float[positionsSize * 3];
                Marshal.Copy(positions, positionsVector, 0, positionsSize * 3);
                Marshal.FreeHGlobal(positions);

                Vector3[] cameraPositions = new Vector3[positionsSize];

                for (int i = 0; i < positionsSize; i++)
                {
                    cameraPositions[i] = VLUnityCameraHelper.VLPoseToUnityPose(
                        new Vector3(
                            positionsVector[i * 3],
                            positionsVector[i * 3 + 1],
                            positionsVector[i * 3 + 2]));
                }

                return(cameraPositions);
            }
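
The native function returns an unmanaged buffer, which is copied into managed memory and freed before each triple of floats is converted with VLPoseToUnityPose. A hypothetical caller might visualize the result like this (the marker objects are purely illustrative):

    // Hypothetical usage: mark each camera position described by the
    // geometry with a small sphere.
    Vector3[] cameraPositions = geometry.GetCameraPositions();

    foreach (Vector3 cameraPosition in cameraPositions)
    {
        GameObject marker = GameObject.CreatePrimitive(PrimitiveType.Sphere);
        marker.transform.position = cameraPosition;
        marker.transform.localScale = Vector3.one * 0.01f;
    }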
Example #4
    private void OnGetInitPoseCallback(string errorJson, string resultJson)
    {
        if (errorJson != null)
        {
            Debug.LogWarning("[vlUnitySDK] OnGetInitPoseCallback: Failed to get init pose");
            return;
        }

        VLModelTrackerCommands.GetInitPoseResult result =
            VLJsonUtility.FromJson<VLModelTrackerCommands.GetInitPoseResult>(
                resultJson);

        Vector3    position;
        Quaternion orientation;

        VLUnityCameraHelper.VLPoseToCamera(
            new Vector3(result.t[0], result.t[1], result.t[2]),
            new Quaternion(result.q[0], result.q[1], result.q[2], result.q[3]),
            out position, out orientation);

        this.originalPosition    = position;
        this.originalOrientation = orientation;
        this.initPoseReady       = true;
        this.reset = true; // This will set the new pose during the next Update
                           // call
    }
Example #5
    private VIS.WorkSpace.Geometry GetDestinationGeometry(
        VLUnityCameraHelper.FlipCoordinateSystemHandedness fcsHandedness)
    {
        VLGeometry creator = this.destinationGeometry.GetComponent<VLGeometry>();

        // check if object has a geometry component
        if (creator != null)
        {
            return(CreateVLGeometry(creator, fcsHandedness));
        }
        else
        {
            // If the target object has no geometry component, use the center
            // point, which is stored in destinationPoints[0]
            VIS.WorkSpace.Transform trans = VLUnityCameraHelper.CreateLocalVLTransform(
                this.destinationGeometry,
                fcsHandedness);
            Vector3[] destination = GetDestinationVertices();
            trans.t = new float[3] {
                destination[0].x, destination[0].y, destination[0].z
            };

            return(new VIS.WorkSpace.Plane(
                       0,
                       0,
                       1,
                       trans));
        }
    }
Example #6
    private static VLModelTrackerCommands.SetInitPoseCmd.Param CameraToInitParam(
        Camera camera, Matrix4x4 offset)
    {
        Vector4    t;
        Quaternion q;

        VLUnityCameraHelper.CameraToVLPose(camera, offset, out t, out q);
        return(new VLModelTrackerCommands.SetInitPoseCmd.Param(
                   t.x, t.y, t.z, q.x, q.y, q.z, q.w));
    }
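
The returned parameter object is presumably pushed to the tracker afterwards. A sketch by analogy with the PushCommand call in Example #18; the SetInitPoseCmd constructor, the callback delegate, and the GCHandle field are assumptions:

    // Sketch only: push the init pose to the tracker (mirrors the
    // PushCommand pattern from Example #18; names are assumptions).
    VLModelTrackerCommands.SetInitPoseCmd.Param param =
        CameraToInitParam(this.initCamera, this.rotCamera);

    this.worker.PushCommand(
        new VLModelTrackerCommands.SetInitPoseCmd(param),
        dispatchSetInitPoseCallbackDelegate,
        GCHandle.ToIntPtr(this.gcHandle));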
Example #7
 public bool GetProjectionMatrix(float nearFact,
                                 float farFact,
                                 int screenWidth,
                                 int screenHeight,
                                 ScreenOrientation screenOrientation,
                                 int mode,
                                 float[] matrix)
 {
     return(GetProjectionMatrix(nearFact, farFact, screenWidth, screenHeight,
                                VLUnityCameraHelper.GetRenderRotation(screenOrientation),
                                mode, matrix));
 }
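
The filled float[] can then be copied into a Matrix4x4 and assigned to a Unity camera. A minimal sketch, assuming the matrix is delivered in the column-major element order of Matrix4x4's linear indexer (that ordering, the mode value 0, and this.cam are assumptions):

    // Sketch: apply the VisionLib projection to a Unity camera.
    float[] matrix = new float[16];

    if (GetProjectionMatrix(0.01f, 100.0f, Screen.width, Screen.height,
                            Screen.orientation, 0, matrix))
    {
        Matrix4x4 projectionMatrix = new Matrix4x4();
        for (int i = 0; i < 16; i++)
        {
            projectionMatrix[i] = matrix[i];
        }
        this.cam.projectionMatrix = projectionMatrix;
    }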
Example #8
    private void UpdateInitPose(Camera cam)
    {
        // Get the VisionLib transformation from the Unity camera
        VLUnityCameraHelper.CameraToVLPose(
            cam, this.rotCamera, out this.t, out this.q);

        // Convert the transformation into JSON
        this.initPose.Set(t, q);
        this.initPoseString = VLJsonUtility.ToJson(this.initPose,
                                                   this.prettyPrint);
    }
Example #9
    private static VLModelTrackerCommands.SetInitPoseCmd.Param CameraToInitParam(
        Camera camera, Matrix4x4 offset,
        VLUnityCameraHelper.FlipCoordinateSystemHandedness flipCSHandedness =
        VLUnityCameraHelper.FlipCoordinateSystemHandedness.Automatically)
    {
        Vector4    t;
        Quaternion q;

        VLUnityCameraHelper.CameraToVLPose(camera, offset, out t, out q, flipCSHandedness);
        return(new VLModelTrackerCommands.SetInitPoseCmd.Param(
                   t.x, t.y, t.z, q.x, q.y, q.z, q.w));
    }
Example #10
    public bool SetFromCamera(Camera camera)
    {
        if (this.disposed)
        {
            throw new ObjectDisposedException("VLExtrinsicDataWrapper");
        }

        // TODO(mbuchner): Implement more of this in native code

        bool result = true;

        Matrix4x4 worldToCameraMatrix = camera.worldToCameraMatrix;

        // Convert from left-handed to right-handed model-view matrix
        worldToCameraMatrix[0, 2] = -worldToCameraMatrix[0, 2];
        worldToCameraMatrix[1, 2] = -worldToCameraMatrix[1, 2];
        worldToCameraMatrix[2, 2] = -worldToCameraMatrix[2, 2];
        // Convert from OpenGL coordinates into VisionLib coordinates
        worldToCameraMatrix = VLUnityCameraHelper.flipYZ * worldToCameraMatrix;

        // Position

        Vector4 t = worldToCameraMatrix.GetColumn(3);

        float[] tData = new float[3];
        tData[0] = t.x;
        tData[1] = t.y;
        tData[2] = t.z;
        if (!this.SetT(tData))
        {
            result = false;
        }

        // Orientation

        Quaternion q = VLUnityCameraHelper.QuaternionFromMatrix(
            worldToCameraMatrix);

        float[] qData = new float[4];
        qData[0] = q.x;
        qData[1] = q.y;
        qData[2] = q.z;
        qData[3] = q.w;
        if (!this.SetR(qData))
        {
            result = false;
        }

        return(result);
    }
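
Negating the third matrix column reverses the viewing axis and thereby turns the left-handed Unity model-view matrix into a right-handed OpenGL-style one before the VisionLib-specific flip is applied. A hypothetical call site (extrinsicDataWrapper is illustrative):

    // Hypothetical usage: write the current Unity camera pose into the
    // extrinsic data and check the combined result.
    if (!extrinsicDataWrapper.SetFromCamera(Camera.main))
    {
        Debug.LogWarning(
            "[vlUnitySDK] Could not set extrinsic data from camera");
    }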
Example #11
    private void SetInitPose()
    {
        if (!this.ready)
        {
            Debug.LogWarning("[vlUnitySDK] SetInitPose called while not ready");
            return;
        }

        GameObject content = (this.holoLensTrackerBehaviour != null ?
                              this.holoLensTrackerBehaviour.content : null);

        if (content == null)
        {
            return;
        }

        // Turn the camera pose into a content transformation
        Matrix4x4 worldToInitCameraMatrix =
            this.initCamera.transform.worldToLocalMatrix;
        Vector3 initContentLocalPosition =
            worldToInitCameraMatrix.GetColumn(3);
        Quaternion initContentLocalOrientation =
            VLUnityCameraHelper.QuaternionFromMatrix(
                worldToInitCameraMatrix);

        // Remove the anchor first, otherwise the content transformation can't
        // be updated
        ClearContentAnchor();

        content.transform.localPosition =
            initContentLocalPosition;
        content.transform.localRotation =
            initContentLocalOrientation;
        if (this.keepUpright)
        {
            Vector3    contentUp  = content.transform.rotation * this.upAxis;
            Quaternion upRotation = Quaternion.FromToRotation(
                contentUp, Vector3.up);
            content.transform.rotation =
                upRotation * content.transform.rotation;
        }

        // Add an anchor with a certain name in order to transfer the init pose
        // to the VisionLib
        SaveContentAnchor();
    }
Example #12
    /// <summary>
    ///  Returns the current transformation of the Camera component in
    ///  VisionLib coordinates as a JSON string.
    /// </summary>
    /// <returns>
    ///  JSON string with the initial pose in VisionLib coordinates.
    /// </returns>
    public string GetInitPoseJson()
    {
        if (!this.InitWorkerReference())
        {
            return("");
        }

        // Get the VisionLib transformation from the camera component
        Vector4    t;
        Quaternion q;

        VLUnityCameraHelper.CameraToVLPose(
            this.cam, this.rotCamera, out t, out q,
            this.workerBehaviour.flipCoordinateSystemHandedness);

        // Convert the transformation into JSON
        InitPose param = new InitPose(t.x, t.y, t.z, q.x, q.y, q.z, q.w);

        return(VLJsonUtility.ToJson(param, this.prettyPrint));
    }
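
A hypothetical caller could persist the returned JSON for later re-initialization (the file handling is purely illustrative):

    // Hypothetical usage: save the current init pose to a file.
    string initPoseJson = this.GetInitPoseJson();

    if (initPoseJson != "")
    {
        System.IO.File.WriteAllText(
            Application.persistentDataPath + "/initPose.json",
            initPoseJson);
    }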
Example #13
    /// <summary>
    /// Creates a VLWorkSpace.Definition from this VLWorkSpace.
    /// </summary>
    /// <returns>VLWorkSpace.Definition described by this class</returns>
    public VIS.WorkSpace.Definition GetWorkSpaceDefinition(bool useCameraRotation)
    {
        VLUnityCameraHelper.FlipCoordinateSystemHandedness fcsHandedness =
            VLUnityCameraHelper.FlipCoordinateSystemHandedness.Automatically;
        if (InitWorkerReference())
        {
            fcsHandedness = this.workerBehaviour.flipCoordinateSystemHandedness;
        }

        VIS.WorkSpace.Definition currentWorkSpaceDef =
            new VIS.WorkSpace.Definition(
                new VIS.WorkSpace.Transform(
                    this.transform.position,
                    this.transform.rotation),
                VLUnityCameraHelper.UnityVectorToVLVector(this.upVector, fcsHandedness),
                useCameraRotation ? defaultRotationRange : 0.0f,
                defaultRotationStep);

        currentWorkSpaceDef.origin      = GetSourceGeometry(fcsHandedness);
        currentWorkSpaceDef.destination = GetDestinationGeometry(fcsHandedness);

        return(currentWorkSpaceDef);
    }
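
Combined with VLJsonUtility.ToJson, the definition can be serialized, e.g. for logging (a sketch; the single-argument ToJson overload is used as in Example #3):

    // Sketch: serialize the workspace definition for inspection.
    VIS.WorkSpace.Definition definition = this.GetWorkSpaceDefinition(true);
    Debug.Log(VLJsonUtility.ToJson(definition));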
Example #14
    /// <summary>
    /// Sets the preview camera to the pose corresponding to the given camera
    /// step index.
    /// </summary>
    /// <param name="camStepPos">camera step index</param>
    private void SetPreviewCamera(int camStepPos)
    {
        if (this.workSpace.poses == null ||
            this.workSpace.poses.Length <= camStepPos ||
            this.workSpace.usedCamera == null)
        {
            return;
        }

        float[]    q = this.workSpace.poses[camStepPos].q;
        float[]    t = this.workSpace.poses[camStepPos].t;
        Quaternion orientation;
        Vector3    position;

        VLUnityCameraHelper.VLPoseToCamera(
            new Vector3(t[0], t[1], t[2]),
            new Quaternion(q[0], q[1], q[2], q[3]),
            out position, out orientation,
            VLUnityCameraHelper.FlipCoordinateSystemHandedness.Automatically);

        this.workSpace.usedCamera.transform.position = position;
        this.workSpace.usedCamera.transform.rotation = orientation;
    }
Example #15
 private void OnOrientationChange(ScreenOrientation orientation)
 {
     this.renderRotation = VLUnityCameraHelper.GetRenderRotation(orientation);
     this.UpdateBackgroundSize();
 }
Example #16
    /// <summary>
    /// Adds a ModelDataDescriptor for the mesh inside the transform to the
    /// commandDescriptor.
    /// </summary>
    /// <param name="transform">
    /// Transform which is searched for possible meshes.
    /// </param>
    /// <param name="useAllChildNodes">
    /// If true: also process child nodes which are administered by another
    /// VLModelTrackableBehaviour.
    /// </param>
    /// <param name="addDataDescription">
    /// If true: fill the ModelDataDescriptor with a DataDescriptor, a
    /// description of the data structure of the model data.
    /// </param>
    /// <param name="commandDescriptor">
    /// Reference to the command structure of the JSON command, which will be
    /// filled by this function.
    /// </param>
    /// <returns>
    /// <c>true</c> if the model could be serialized into the VisionLib;
    /// <c>false</c> if the data could not be gathered.
    /// </returns>
    private bool AddModelDescription(
        Transform transform,
        bool useAllChildNodes,
        bool addDataDescription,
        ref AddModelDataCommandDescription commandDescriptor)
    {
        // If transform is not active, do not add the model
        if (!transform.gameObject.activeInHierarchy)
        {
            return(false);
        }

        // Check whether another VLModelTrackableBehaviour is active in this
        // transform. If this is the case, skip this node and its children.
        VLModelTrackableBehaviour trackable =
            transform.GetComponent<VLModelTrackableBehaviour>();

        if (!useAllChildNodes &&
            trackable &&
            trackable != this &&
            trackable.enabled)
        {
            return(false);
        }
        if (trackable == null)
        {
            trackable = this;
        }


        Quaternion rotation    = transform.rotation;
        Vector3    globalScale = GetGlobalScale(transform);
        Vector3    position    = transform.position;

        // On HoloLens, the content node is added to the camera and thus the
        // transformation of the mesh will be changed. This change has to be
        // removed when streaming the data into the vlSDK
        Transform contentTransform = getContentTransform();

        if (contentTransform != null)
        {
            Vector3 contentGlobalScale = GetGlobalScale(contentTransform);
            rotation =
                Quaternion.Inverse(contentTransform.rotation) * rotation;
            globalScale = new Vector3(
                globalScale.x / contentGlobalScale.x,
                globalScale.y / contentGlobalScale.y,
                globalScale.z / contentGlobalScale.z);
            position =
                Quaternion.Inverse(contentTransform.rotation) *
                (position - contentTransform.position);
        }

        VLUnityCameraHelper.ToVLInPlace(
            ref position, ref rotation,
            modelTrackerBehaviour.workerBehaviour.flipCoordinateSystemHandedness);

        MeshFilter mesh = transform.GetComponent<MeshFilter>();
        string     uniqueUnityModelID = mesh.GetInstanceID().ToString();

        ModelTransform modelTransform = new ModelTransform();

        modelTransform.t = new float[] { position.x, position.y, position.z };
        modelTransform.s = new float[]
        { globalScale.x, globalScale.y, globalScale.z };
        modelTransform.q = new float[]
        { rotation.x, rotation.y, rotation.z, rotation.w };

        ModelDataDescriptor descriptor = new ModelDataDescriptor();

        descriptor.name      = uniqueUnityModelID;
        descriptor.type      = "model";
        descriptor.enabled   = trackable.useForTracking;
        descriptor.occluder  = trackable.occluder;
        descriptor.transform = modelTransform;
        if (addDataDescription)
        {
            descriptor.subModels = new BinaryDataDescriptor[]
            { CreateDataDescriptor(mesh) };
        }

        commandDescriptor.models.Add(descriptor);

        return(true);
    }
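
For reference, the descriptor assembled above reduces to the following shape (a sketch with illustrative values: identity rotation, unit scale, no sub-models):

    // Minimal sketch of the descriptor built by AddModelDescription.
    ModelTransform modelTransform = new ModelTransform();
    modelTransform.t = new float[] { 0.0f, 0.0f, 0.0f };
    modelTransform.s = new float[] { 1.0f, 1.0f, 1.0f };
    modelTransform.q = new float[] { 0.0f, 0.0f, 0.0f, 1.0f };

    ModelDataDescriptor descriptor = new ModelDataDescriptor();
    descriptor.name      = "myUniqueModelID";
    descriptor.type      = "model";
    descriptor.enabled   = true;
    descriptor.occluder  = false;
    descriptor.transform = modelTransform;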
Example #17
 private void OnOrientationChange(ScreenOrientation orientation)
 {
     this.renderRotation = VLUnityCameraHelper.GetRenderRotation(orientation);
     this.invRotCamera   = VLUnityCameraHelper.GetRenderRotationMatrixFromVLToUnity(this.renderRotation);
     this.rotCamera      = VLUnityCameraHelper.GetRenderRotationMatrixFromUnityToVL(this.renderRotation);
 }
Example #18
    private void SetInitPose()
    {
        if (!IsReady())
        {
            Debug.LogWarning("[vlUnitySDK] SetInitPose called while not ready");
            return;
        }

        GameObject content = (this.holoLensTrackerBehaviour != null ?
                              this.holoLensTrackerBehaviour.content : null);

        if (content == null)
        {
            Debug.LogWarning("[vlUnitySDK] SetInitPose: No VLHoloLensTrackerBehaviour in the scene or its content is not specified");
            return;
        }

        if (!this.InitWorkerReference() || !workerBehaviour.GetTrackingRunning())
        {
            return;
        }

        // To prevent the vlSDK from getting more `SetGlobalObjectPose` calls
        // than can be processed in time, we limit the amount of
        // `SetGlobalObjectPose` commands.
        if (this.setGlobalObjectPoseCounter >= maxSetGlobalObjectPoseCommands)
        {
            return;
        }
        this.setGlobalObjectPoseCounter += 1;

        // Turn the camera pose into a content transformation
        Matrix4x4 worldToInitCameraMatrix =
            this.initCamera.transform.worldToLocalMatrix;
        Vector3 initContentLocalPosition =
            worldToInitCameraMatrix.GetColumn(3);
        Quaternion initContentLocalOrientation =
            VLUnityCameraHelper.QuaternionFromMatrix(
                worldToInitCameraMatrix);

        content.transform.localPosition =
            initContentLocalPosition;
        content.transform.localRotation =
            initContentLocalOrientation;
        if (this.keepUpright)
        {
            Vector3    contentUp  = content.transform.rotation * this.upAxis;
            Quaternion upRotation = Quaternion.FromToRotation(
                contentUp, Vector3.up);
            content.transform.rotation =
                upRotation * content.transform.rotation;
        }

        Matrix4x4 globalObjectMatrix = content.transform.localToWorldMatrix;

        if (this.workerBehaviour.flipCoordinateSystemHandedness ==
            VLUnityCameraHelper.FlipCoordinateSystemHandedness.Automatically)
        {
            globalObjectMatrix *= VLUnityCameraHelper.rotationY180;
        }

        // Compute the right-handed global object transformation
        globalObjectMatrix = VLUnityCameraHelper.flipZ *
                             globalObjectMatrix *
                             VLUnityCameraHelper.flipZ;

        // We need to provide the global coordinate system once and push the
        // current position of the content to the tracker in every frame.
        Vector3    t = globalObjectMatrix.GetColumn(3);
        Quaternion q = Quaternion.LookRotation(
            globalObjectMatrix.GetColumn(2),
            globalObjectMatrix.GetColumn(1));

        VLHoloLensModelTrackerCommands.SetGlobalObjectPoseCmd.Param param =
            new VLHoloLensModelTrackerCommands.SetGlobalObjectPoseCmd.Param(
                t.x,
                t.y,
                t.z,
                q.x,
                q.y,
                q.z,
                q.w);

        this.worker.PushCommand(
            new VLHoloLensModelTrackerCommands.SetGlobalObjectPoseCmd(param),
            dispatchSetGlobalObjectPoseCallbackDelegate,
            GCHandle.ToIntPtr(this.gcHandle));
    }
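
The flipZ sandwich above is a similarity transform: assuming VLUnityCameraHelper.flipZ negates the Z axis (as its name suggests, i.e. diag(1, 1, -1, 1)), conjugating with it converts a left-handed Unity transform into a right-handed VisionLib one, and because the flip is its own inverse the same expression converts in both directions. A standalone sketch under that assumption:

    // Sketch: handedness conversion via conjugation with a Z flip.
    Matrix4x4 F = Matrix4x4.Scale(new Vector3(1.0f, 1.0f, -1.0f));
    Matrix4x4 leftHanded  = content.transform.localToWorldMatrix;
    Matrix4x4 rightHanded = F * leftHanded * F; // F is its own inverse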