/// <summary>
        /// Exposes this mesh's vertex buffer to the native plugin so the plugin
        /// can rewrite vertex positions directly.
        /// Assumes the mesh contains only vertex components.
        /// </summary>
        private void InitializeNativeMeshManipulation()
        {
            Mesh mesh = GetComponent<MeshFilter>().mesh;

            // Rebase every vertex so its origin sits at the tracked camera.
            // NOTE(review): AttachedCameraIndex selects the camera here while
            // CameraIndex selects the plugin parameters below -- confirm these
            // two properties are intentionally distinct.
            ViveSR_TrackedCamera trackedCamera =
                AttachedCameraIndex == DualCameraIndex.LEFT
                    ? ViveSR_DualCameraRig.Instance.TrackedCameraLeft
                    : ViveSR_DualCameraRig.Instance.TrackedCameraRight;
            Transform cameraTransform = trackedCamera.transform;

            Vector3[] vertices = mesh.vertices;
            for (int i = 0; i < vertices.Length; ++i)
            {
                // object space -> world space -> camera space
                Vector3 worldPoint = transform.TransformPoint(vertices[i]);
                vertices[i] = cameraTransform.InverseTransformPoint(worldPoint);
            }

            // Pin the managed vertex copy and hand both the GPU buffer pointer
            // and the pinned CPU array over to the ViveSR framework.
            mesh.MarkDynamic();
            GCHandle gcVertices = GCHandle.Alloc(vertices, GCHandleType.Pinned);

            if (CameraIndex == DualCameraIndex.LEFT)
            {
                SeeThroughParam bufferParam = IsForCalibration
                    ? SeeThroughParam.MESH_CALIB_NATIVE_VERTEX_BUFFER_PTR_L
                    : SeeThroughParam.MESH_NATIVE_VERTEX_BUFFER_PTR_L;
                SeeThroughParam arrayParam = IsForCalibration
                    ? SeeThroughParam.MESH_CALIB_NATIVE_VERTEX_ARRAY_L
                    : SeeThroughParam.MESH_NATIVE_VERTEX_ARRAY_L;
                ViveSR_Framework.SetParameterNativePtr(ViveSR_Framework.MODULE_ID_SEETHROUGH, (int)bufferParam, mesh.GetNativeVertexBufferPtr(0));
                ViveSR_Framework.SetParameterNativePtr(ViveSR_Framework.MODULE_ID_SEETHROUGH, (int)arrayParam, gcVertices.AddrOfPinnedObject());
            }
            else if (CameraIndex == DualCameraIndex.RIGHT)
            {
                SeeThroughParam bufferParam = IsForCalibration
                    ? SeeThroughParam.MESH_CALIB_NATIVE_VERTEX_BUFFER_PTR_R
                    : SeeThroughParam.MESH_NATIVE_VERTEX_BUFFER_PTR_R;
                SeeThroughParam arrayParam = IsForCalibration
                    ? SeeThroughParam.MESH_CALIB_NATIVE_VERTEX_ARRAY_R
                    : SeeThroughParam.MESH_NATIVE_VERTEX_ARRAY_R;
                ViveSR_Framework.SetParameterNativePtr(ViveSR_Framework.MODULE_ID_SEETHROUGH, (int)bufferParam, mesh.GetNativeVertexBufferPtr(0));
                ViveSR_Framework.SetParameterNativePtr(ViveSR_Framework.MODULE_ID_SEETHROUGH, (int)arrayParam, gcVertices.AddrOfPinnedObject());
            }

            // NOTE(review): the pinned handle is released immediately after the
            // calls above -- this assumes the native side consumes or copies the
            // array address synchronously; confirm it does not retain the pointer.
            gcVertices.Free();

            // Make the local bounds generous so the mesh is never frustum-culled
            // after the plugin moves its vertices.
            Bounds bounds = mesh.bounds;
            bounds.extents = new Vector3(100f, 100f, 100f);
            mesh.bounds = bounds;
        }
        // ---- Example #2 ----
        /// <summary>
        /// Initialize the image capturing tool: creates the per-eye color and
        /// depth textures and allocates the native data buffers the ViveSR
        /// see-through module fills each frame.
        /// </summary>
        /// <returns>Always (int)Error.WORK; allocation failures are not reported here.</returns>
        public static int Initial()
        {
            GetParameters();
            // Pointer-size caches start invalid; presumably populated lazily on
            // first frame retrieval -- confirm against the update path.
            InitialDistortedPtrSize   = false;
            InitialUndistortedPtrSize = false;
            InitialDepthPtrSize       = false;
            // RGBA32 color targets for the raw (distorted) and rectified
            // (undistorted) camera images, one texture per eye.
            TextureDistortedLeft      = new Texture2D(DistortedImageWidth, DistortedImageHeight, TextureFormat.RGBA32, false);
            TextureDistortedRight     = new Texture2D(DistortedImageWidth, DistortedImageHeight, TextureFormat.RGBA32, false);
            TextureUndistortedLeft    = new Texture2D(UndistortedImageWidth, UndistortedImageHeight, TextureFormat.RGBA32, false);
            TextureUndistortedRight   = new Texture2D(UndistortedImageWidth, UndistortedImageHeight, TextureFormat.RGBA32, false);
            // Single-channel float texture for the depth map.
            TextureDepth = new Texture2D(DepthImageWidth, DepthImageHeight, TextureFormat.RFloat, false);

#if USE_DISTORT_TEX_NATIVE_BUFFER
            DistortTextureIsNative = true;
            var deviceTexture = new Texture2D(2, 2);
            ViveSR_Framework.SetParameterNativePtr(ViveSR_Framework.MODULE_ID_SEETHROUGH, (int)SeeThroughParam.VIDEO_RES_NATIVE_PTR, deviceTexture.GetNativeTexturePtr());
            // NOTE(review): 'nativeTex' is not declared in this method; it must be
            // a field defined elsewhere in the file -- verify it is assigned before
            // this runs, otherwise this #if branch cannot compile as written.
            ViveSR_Framework.SetParameterNativePtr(ViveSR_Framework.MODULE_ID_SEETHROUGH, (int)SeeThroughParam.VIDEO_RES_VIEW_NATIVE_PTR, nativeTex);
#endif

#if USE_UNDISTORT_TEX_NATIVE_BUFFER
            UndistortTextureIsNative = true;
            // Apply() uploads the textures so GetNativeTexturePtr() returns a
            // valid GPU resource handle for the plugin to write into.
            TextureUndistortedLeft.Apply();
            TextureUndistortedRight.Apply();
            ViveSR_Framework.SetParameterNativePtr(ViveSR_Framework.MODULE_ID_SEETHROUGH, (int)SeeThroughParam.IMAGE_NATIVE_TEXTURE_PTR_L, TextureUndistortedLeft.GetNativeTexturePtr());
            ViveSR_Framework.SetParameterNativePtr(ViveSR_Framework.MODULE_ID_SEETHROUGH, (int)SeeThroughParam.IMAGE_NATIVE_TEXTURE_PTR_R, TextureUndistortedRight.GetNativeTexturePtr());
#endif

            // Per-slot native buffers handed to the framework; IntPtr.Zero marks
            // slots the native side fills (or that this configuration skips).
            // NOTE(review): these AllocCoTaskMem blocks have no matching free in
            // this method -- assumed released in a Release/Stop path; confirm.
            DataInfoDistorted = ViveSR_Framework.CreateDataInfo(new IntPtr[] {
#if USE_DISTORT_TEX_NATIVE_BUFFER
                IntPtr.Zero,                                                                                // DISTORTED_FRAME_LEFT
                IntPtr.Zero,                                                                                // DISTORTED_FRAME_RIGHT
                IntPtr.Zero,                                                                                // UNDISTORTED_FRAME_LEFT
                IntPtr.Zero,                                                                                // UNDISTORTED_FRAME_RIGHT
#else
                Marshal.AllocCoTaskMem(DistortedImageWidth * DistortedImageHeight * DistortedImageChannel), // DISTORTED_FRAME_LEFT
                Marshal.AllocCoTaskMem(DistortedImageWidth * DistortedImageHeight * DistortedImageChannel), // DISTORTED_FRAME_RIGHT
                IntPtr.Zero,                                                                                // UNDISTORTED_FRAME_LEFT
                IntPtr.Zero,                                                                                // UNDISTORTED_FRAME_RIGHT
#endif
                Marshal.AllocCoTaskMem(sizeof(int)),                                                        // FRAME_SEQ
                Marshal.AllocCoTaskMem(sizeof(int)),                                                        // TIME_STP (NOTE(review): 32-bit -- confirm native timestamp width)
                Marshal.AllocCoTaskMem(sizeof(float) * 16),                                                 // LEFT_POSE (4x4 matrix)
                Marshal.AllocCoTaskMem(sizeof(float) * 16),                                                 // RIGHT_POSE (4x4 matrix)
            });

            DataInfoUndistorted = ViveSR_Framework.CreateDataInfo(new IntPtr[] {
#if USE_UNDISTORT_TEX_NATIVE_BUFFER
                IntPtr.Zero,                                                                                      // DISTORTED_FRAME_LEFT
                IntPtr.Zero,                                                                                      // DISTORTED_FRAME_RIGHT
                IntPtr.Zero,                                                                                      // UNDISTORTED_FRAME_LEFT
                IntPtr.Zero,                                                                                      // UNDISTORTED_FRAME_RIGHT
#else
                IntPtr.Zero,                                                                                      // DISTORTED_FRAME_LEFT
                IntPtr.Zero,                                                                                      // DISTORTED_FRAME_RIGHT
                Marshal.AllocCoTaskMem(UndistortedImageWidth * UndistortedImageHeight * UndistortedImageChannel), // UNDISTORTED_FRAME_LEFT
                Marshal.AllocCoTaskMem(UndistortedImageWidth * UndistortedImageHeight * UndistortedImageChannel), // UNDISTORTED_FRAME_RIGHT
#endif
                Marshal.AllocCoTaskMem(sizeof(int)),                                                              // FRAME_SEQ
                Marshal.AllocCoTaskMem(sizeof(int)),                                                              // TIME_STP
                Marshal.AllocCoTaskMem(sizeof(float) * 16),                                                       // LEFT_POSE (4x4 matrix)
                Marshal.AllocCoTaskMem(sizeof(float) * 16),                                                       // RIGHT_POSE (4x4 matrix)
            });

            // To use enum DepthDataMask, It needs to assign each type.
            DataInfoDepth = ViveSR_Framework.CreateDataInfo(new IntPtr[] {
                IntPtr.Zero,                                                                                        // LEFT_FRAME
                Marshal.AllocCoTaskMem(DepthImageWidth * DepthImageHeight * DepthImageChannel * DepthDataSize),     // DEPTH_MAP
                Marshal.AllocCoTaskMem(sizeof(int)),                                                                // FRAME_SEQ
                Marshal.AllocCoTaskMem(sizeof(int)),                                                                // TIME_STP
                Marshal.AllocCoTaskMem(sizeof(float) * 16)                                                          // POSE (4x4 matrix)
            });

            return((int)Error.WORK);
        }