Example 1
        void SetRenderingCameraTransformation()
        {
            if (!m_useCalibratedProjector)
            {
                // Normal Camera -> Use Normal Transformation from UI Panel
                Camera.main.gameObject.transform.eulerAngles = new Vector3(90, 0, 0);
                if (m_projectorHeight != default)
                {
                    Camera.main.gameObject.transform.position = new Vector3(0, m_projectorHeight * 0.01f, 0); // m_projectorHeight is in cm -> convert to meters
                }
            }
            else
            {
                // Calibrated Camera -> Use the calibrated transformation / projection computed from calibration data.
                if (m_cameraTransform_Unity_Mat4x4 != default)
                {
                    Camera.main.gameObject.transform.rotation = DanbiComputeShaderHelper.GetRotation(m_cameraTransform_Unity_Mat4x4);
                }

                if (m_unityCamPos != default)
                {
                    Camera.main.gameObject.transform.position = m_unityCamPos;
                }
            }
        }
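        // The calibrated branch above extracts a rotation via DanbiComputeShaderHelper.GetRotation.
        // A minimal sketch of such a helper, assuming the matrix is an orthonormal TRS transform
        // (an illustration, not the project's actual implementation):
        static Quaternion GetRotation_Sketch(Matrix4x4 m)
        {
            // Build a quaternion from the matrix's forward (column 2) and up (column 1) basis vectors.
            return Quaternion.LookRotation(m.GetColumn(2), m.GetColumn(1));
        }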
Example 2
        // TODO:
        // bool CheckAllInternalValuesValid()
        // {
        //     // return (DanbiManager.instance.screen.screenResolution.x != m_errorVal &&
        //     //         DanbiManager.instance.screen.screenResolution.y != m_errorVal &&
        //     //         m_cameraInternalData.focalLength.x != m_errorVal &&

        // }

        // bool CheckAllExternalValuesValid()
        // {

        // }

        void PrepareInternalParametersToMatrix()
        {
            // 3. Create the Projection Matrix
            float width  = DanbiManager.instance.screen.screenResolution.x; // width = 3840 =  Projector Width
            float height = DanbiManager.instance.screen.screenResolution.y; // height = 2160 = Projector Height

            float left   = 0;
            float right  = width;
            float bottom = 0;
            float top    = height;

            float near = Camera.main.nearClipPlane;      // near: positive
            float far  = Camera.main.farClipPlane;       // far: positive

            // float aspectRatio = width / height;

            float scaleFactorX = m_cameraInternalData.focalLength.x;
            float scaleFactorY = m_cameraInternalData.focalLength.y;

            float cx = m_cameraInternalData.principalPoint.x;
            float cy = m_cameraInternalData.principalPoint.y;

            // http://ksimek.github.io/2013/06/03/calibrated_cameras_in_opengl/
            // We can think of the perspective transformation as converting a
            // trapezoidal-prism-shaped viewing volume into a rectangular-prism-shaped
            // viewing volume, which glOrtho() scales and translates into the 2x2x2 cube
            // in Normalized Device Coordinates.

            Matrix4x4 NDCMatrix_OpenGL = DanbiComputeShaderHelper.GetOrthoMat(left, right, bottom, top, near, far);

            // Refer to http://ksimek.github.io/2012/08/14/decompose/ to understand the following code.
            Matrix4x4 KMatrixFromOpenCVToOpenGL = DanbiComputeShaderHelper.OpenCVKMatrixToOpenGLKMatrix(scaleFactorX, scaleFactorY, cx, cy, near, far);

            // Our discussion of 2D coordinate conventions has referred to the coordinates used during calibration.
            // If your application uses a different 2D coordinate convention,
            // you'll need to transform K using a 2D translation and reflection.

            // For example, consider a camera matrix that was calibrated with the origin in the top-left
            // and the y-axis pointing downward, but you prefer a bottom-left origin with the y-axis pointing upward.
            // To convert, first negate the image y-coordinate and then translate upward by the image height h.
            // The resulting intrinsic matrix K' is given by:

            // K' = [ 1 0 0; 0 1 h; 0 0 1 ] * [ 1 0 0; 0 -1 0; 0 0 1 ] * K

            Vector4 column0 = new Vector4(1f, 0f, 0f, 0f);
            Vector4 column1 = new Vector4(0f, -1f, 0f, 0f);
            Vector4 column2 = new Vector4(0f, 0f, 1f, 0f);
            Vector4 column3 = new Vector4(0f, height, 0f, 1f);

            Matrix4x4 OpenCVCameraToOpenGLCamera = new Matrix4x4(column0, column1, column2, column3);

            m_calibratedProjectionMatrixGL = NDCMatrix_OpenGL * OpenCVCameraToOpenGLCamera * KMatrixFromOpenCVToOpenGL;
            SetRenderingCameraTransformation();
        }
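        // Hedged sketches of the two helpers used above, following the ksimek article linked
        // in the comments. Both are assumptions about what DanbiComputeShaderHelper provides,
        // not the project's actual code. GetOrthoMat corresponds to the glOrtho
        // scale-and-translate into the 2x2x2 NDC cube:
        static Matrix4x4 GetOrthoMat_Sketch(float l, float r, float b, float t, float n, float f)
        {
            var m = Matrix4x4.zero;
            m[0, 0] = 2f / (r - l);  m[0, 3] = -(r + l) / (r - l);
            m[1, 1] = 2f / (t - b);  m[1, 3] = -(t + b) / (t - b);
            m[2, 2] = -2f / (f - n); m[2, 3] = -(f + n) / (f - n);
            m[3, 3] = 1f;
            return m;
        }

        // OpenCVKMatrixToOpenGLKMatrix would then place the OpenCV intrinsics into the
        // OpenGL-style perspective block (camera looking down -z), per the same article:
        static Matrix4x4 PerspFromK_Sketch(float fx, float fy, float cx, float cy, float n, float f)
        {
            var m = Matrix4x4.zero;
            m[0, 0] = fx; m[0, 2] = -cx;
            m[1, 1] = fy; m[1, 2] = -cy;
            m[2, 2] = n + f; m[2, 3] = n * f;
            m[3, 2] = -1f;
            return m;
        }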
Example 3
        public void RebuildMeshInfo()
        {
            var control = DanbiManager.instance.shaderControl;

            switch (m_texType)
            {
            case EDanbiTextureType.Regular:
                m_panoramaRegular.RebuildMeshInfoForComputeShader(ref m_panoramaShapeData);
                break;

            case EDanbiTextureType.Faces4:
                m_panorama4faces.RebuildMeshInfoForComputeShader(ref m_panoramaShapeData);
                break;

            case EDanbiTextureType.Panorama:
                m_panorama360.RebuildMeshInfoForComputeShader(ref m_panoramaShapeData);
                break;
            }
            m_reflector.RebuildMeshInfoForComputeShader(ref m_reflectorShapeData);

            // 5. reflector mesh shape data
            switch (m_meshType)
            {
            // case EDanbiPrewarperSetting_MeshType.Custom_Cone:
            //     // control.buffersDic.Add("_", DanbiComputeShaderHelper.CreateComputeBuffer_Ret<DanbiConeData_struct>((reflectorShapeData as DanbiConeData).asStruct, 1));
            //     break;

            // case EDanbiPrewarperSetting_MeshType.Custom_Cylinder:
            //     // control.buffersDic.Add("_", DanbiComputeShaderHelper.CreateComputeBuffer_Ret<DanbiCylinderData_struct>((reflectorShapeData as DanbiCylinderData).asStruct, 1));
            //     break;

            case EDanbiPrewarperSetting_MeshType.Custom_Dome:
                var domeData = m_reflectorShapeData as DanbiDomeData;
                var domeDataComputeBuffer = DanbiComputeShaderHelper.CreateComputeBuffer_Ret(domeData.asStruct, domeData.stride);
                control.bufferDict.AddBuffer_NoDuplicate("_DomeData", domeDataComputeBuffer);
                break;
            }

            // 6. panorama mesh shape data
            // since all panorama types share the same shape data, there's no need to create duplicate buffers.
            var panoramaData = m_panoramaShapeData as DanbiPanoramaData;
            var panoramaDataComputeBuffer = DanbiComputeShaderHelper.CreateComputeBuffer_Ret(panoramaData.asStruct, panoramaData.stride);

            control.bufferDict.AddBuffer_NoDuplicate("_PanoramaData", panoramaDataComputeBuffer);
        }
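        // RebuildMeshInfo wraps single structs in compute buffers through
        // DanbiComputeShaderHelper.CreateComputeBuffer_Ret. A minimal sketch of such a helper,
        // assuming one element of the given stride per buffer (illustration only; Example 5
        // notes that SetData happens at creation time):
        static ComputeBuffer CreateComputeBuffer_Sketch<T>(T data, int stride) where T : struct
        {
            var buffer = new ComputeBuffer(1, stride); // one element, `stride` bytes wide
            buffer.SetData(new T[] { data });          // upload to the GPU immediately
            return buffer;
        }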
Example 4
        public void RebuildMeshShape()
        {
            var control = DanbiManager.instance.shaderControl;

            m_panoramaMeshData.Clear();
            m_reflectorMeshData.Clear();
            m_meshData.Clear();

            // Fill out the mesh data and the shape data used by the compute shader.

            switch (m_texType)
            {
            case EDanbiTextureType.Regular:
                m_panoramaRegular.RebuildMeshShapeForComputeShader(ref m_panoramaMeshData);
                break;

            case EDanbiTextureType.Faces4:
                m_panorama4faces.RebuildMeshShapeForComputeShader(ref m_panoramaMeshData);
                break;

            case EDanbiTextureType.Panorama:
                m_panorama360.RebuildMeshShapeForComputeShader(ref m_panoramaMeshData);
                break;
            }
            m_reflector.RebuildMeshShapeForComputeShader(ref m_reflectorMeshData);

            m_meshData.JoinData(m_panoramaMeshData, m_reflectorMeshData);

            // 3. Find the kernel and set it as the current kernel.
            DanbiKernelHelper.CurrentKernelIndex = DanbiKernelHelper.CalcCurrentKernelIndex(m_meshType, m_panoramaType);

            // 4. Populate the compute buffer dictionary.
            var vtxComputeBuffer = DanbiComputeShaderHelper.CreateComputeBuffer_Ret(m_meshData.Vertices, 12); // stride 12 = 3 floats per vertex position

            control.bufferDict.AddBuffer_NoDuplicate("_Vertices", vtxComputeBuffer);

            var idxComputeBuffer = DanbiComputeShaderHelper.CreateComputeBuffer_Ret(m_meshData.Indices, 4); // stride 4 = one 32-bit index

            control.bufferDict.AddBuffer_NoDuplicate("_Indices", idxComputeBuffer);

            var texcoordsComputeBuffer = DanbiComputeShaderHelper.CreateComputeBuffer_Ret(m_meshData.Texcoords, 8); // stride 8 = 2 floats per UV

            control.bufferDict.AddBuffer_NoDuplicate("_Texcoords", texcoordsComputeBuffer);
        }
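        // The buffers above are registered through AddBuffer_NoDuplicate. A plausible sketch
        // of such a helper, assuming bufferDict is a Dictionary<string, ComputeBuffer>
        // (an assumption for illustration, not the project's actual code):
        static void AddBufferNoDuplicate_Sketch(Dictionary<string, ComputeBuffer> dict,
                                                string key, ComputeBuffer buffer)
        {
            if (dict.TryGetValue(key, out var old))
            {
                old?.Release(); // don't leak the GPU allocation already stored under this key
                dict[key] = buffer;
            }
            else
            {
                dict.Add(key, buffer);
            }
        }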
Example 5
        void Awake()
        {
            // Query whether the hardware supports compute shaders.
            if (!SystemInfo.supportsComputeShaders)
            {
                Debug.LogError("This machine doesn't support Compute Shader!", this);
            }

            // Initialize the Screen Sampling shader.
            m_addMaterial_ScreenSampling = new Material(Shader.Find("Hidden/AddShader"));

            DanbiPanoramaScreenChanger.onCenterPosOfMeshUpdate_Panorama +=
                (Vector3 newCenterOfPanoramaMesh) =>
            {
                m_centerOfPanoramaMesh = new Vector4(newCenterOfPanoramaMesh.x,
                                                     newCenterOfPanoramaMesh.y,
                                                     newCenterOfPanoramaMesh.z,
                                                     0.0f);
            };

            DanbiUIImageGeneratorTexturePanel.onTextureTypeChange +=
                (EDanbiTextureType type) => m_isPanoramaTex = (int)type;

            DanbiUIVideoGeneratorVideoPanel.onVideoTypeChange +=
                (int isPanoramaTex) => m_isPanoramaTex = isPanoramaTex;

            // Populate kernels index.
            PopulateKernels();

            m_maxNumOfBounce    = 2;
            m_samplingThreshold = 10;

            #region dbg
            // DanbiDbg.PrepareDbgBuffers();
            // SetData is performed automatically when the buffer is created.
            dbg_usedHeightBuf = DanbiComputeShaderHelper.CreateComputeBuffer_Ret(dbg_usedHeightArr, 4);
            // dbg_centerOfPanoBuf = DanbiComputeShaderHelper.CreateComputeBuffer_Ret(dbg_centerOfPanoArr, 16);
            // dbg_rayLengthBuf = DanbiComputeShaderHelper.CreateComputeBuffer_Ret(dbg_rayLengthArr, 12);
            dbg_hitInfoBuf = DanbiComputeShaderHelper.CreateComputeBuffer_Ret(dbg_hitInfoArr, 16);
            // dbg_cameraInternalDataBuf = DanbiComputeShaderHelper.CreateComputeBuffer_Ret(dbg_cameraInternalData, 40);
            // dbg_cameraToWorldMatBuf = DanbiComputeShaderHelper.CreateComputeBuffer_Ret(dbg_cameraInverseProjectionArr, 64);
            // dbg_cameraInverseProjectionBuf = DanbiComputeShaderHelper.CreateComputeBuffer_Ret(dbg_cameraInverseProjectionArr, 64);
            #endregion dbg
        }
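        // ComputeBuffers are unmanaged GPU allocations and are not garbage-collected, so the
        // debug buffers created in Awake need a matching release. A sketch of the teardown,
        // reusing the field names above (the actual lifecycle hook is an assumption):
        void OnDisable()
        {
            dbg_usedHeightBuf?.Release();
            dbg_usedHeightBuf = null;
            dbg_hitInfoBuf?.Release();
            dbg_hitInfoBuf = null;
        }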
Example 6
        void PrepareExternalParametersToMatrix()
        {
            // 1. Create the Camera Transform
            float4x4 ViewTransform_OpenCV = new float4x4(new float4(m_cameraExternalData.xAxis, 0),
                                                         new float4(m_cameraExternalData.yAxis, 0),
                                                         new float4(m_cameraExternalData.zAxis, 0),
                                                         new float4(m_cameraExternalData.projectorPosition * 0.001f, 1)
                                                         );
            // Debug.Log($"ViewTransform =\n{  ViewTransform_OpenCV }");

            float3x3 ViewTransform_Rot_OpenCV = new float3x3(
                ViewTransform_OpenCV.c0.xyz, ViewTransform_OpenCV.c1.xyz, ViewTransform_OpenCV.c2.xyz);

            float3 ViewTransform_Trans_OpenCV = ViewTransform_OpenCV.c3.xyz;


            float3x3 CameraTransformation_Rot_OpenCV = math.transpose(ViewTransform_Rot_OpenCV);


            // float4x4 CameraTransformation_OpenCV = new float4x4(CameraTransformation_Rot_OpenCV,
            //                                         -math.mul(CameraTransformation_Rot_OpenCV, ViewTransform_Trans_OpenCV));

            float4x4 CameraTransformation_OpenCV = new float4x4(new float4(CameraTransformation_Rot_OpenCV.c0, 0.0f),
                                                                new float4(CameraTransformation_Rot_OpenCV.c1, 0.0f),
                                                                new float4(CameraTransformation_Rot_OpenCV.c2, 0.0f),
                                                                new float4(-math.mul(CameraTransformation_Rot_OpenCV, ViewTransform_Trans_OpenCV), 1.0f));

            // Debug.Log($"CameraTransformation_OpenCV (obtained by transpose) =\n{ CameraTransformation_OpenCV }");


            // float4x4 CameraTransform_OpenCV = math.inverse(ViewTransform_OpenCV);
            // Debug.Log($" CameraTransform_OpenCV (obtained by inverse)=\n{  CameraTransform_OpenCV }");

            // https://stackoverflow.com/questions/1263072/changing-a-matrix-from-right-handed-to-left-handed-coordinate-system

            // UnityToOpenCVMat is a change-of-basis matrix (a swap of axes) with determinant -1,
            // i.e. an improper rotation, so no well-defined quaternion exists for it.

            float4 externalData_column0 = new float4(DanbiCameraExternalData.UnityToOpenCVMat.c0, 0);
            float4 externalData_column1 = new float4(DanbiCameraExternalData.UnityToOpenCVMat.c1, 0);
            float4 externalData_column2 = new float4(DanbiCameraExternalData.UnityToOpenCVMat.c2, 0);
            float4 externalData_column3 = new float4(0, 0, 0, 1);


            float4x4 UnityToOpenCV = new float4x4(externalData_column0, externalData_column1, externalData_column2, externalData_column3);

            float3x3 UnityToOpenCV_Rot = new float3x3(UnityToOpenCV.c0.xyz,
                                                      UnityToOpenCV.c1.xyz,
                                                      UnityToOpenCV.c2.xyz);

            float3x3 OpenCVToUnity_Rot = math.transpose(UnityToOpenCV_Rot);

            float3 UnityToOpenCV_Trans = UnityToOpenCV.c3.xyz;

            float4x4 OpenCVToUnity = new float4x4(OpenCVToUnity_Rot, -math.mul(OpenCVToUnity_Rot, UnityToOpenCV_Trans));

            // Debug.Log($" UnityToOpenCV inverse = \n {math.inverse(UnityToOpenCV)} ");

            // Debug.Log($" UnityToOpenCV transpose  = \n {OpenCVToUnity}");

            // A 90-degree rotation about the x axis (y -> z, z -> -y), correcting the object frame.
            float4x4 MatForObjectFrame = new float4x4(
                new float4(1, 0, 0, 0),
                new float4(0, 0, 1, 0),
                new float4(0, -1, 0, 0),
                new float4(0, 0, 0, 1));


            // Similarity transform: conjugate the OpenCV camera transform by the Unity/OpenCV
            // basis change, then apply the object-frame correction.
            float4x4 CameraTransform_Unity = math.mul(
                math.mul(
                    math.mul(UnityToOpenCV,
                             CameraTransformation_OpenCV
                             ),
                    OpenCVToUnity),
                MatForObjectFrame
                );

            m_cameraTransform_Unity_Mat4x4 = CameraTransform_Unity;
            // Debug.Log($"Determinimant of m_cameraTransform_Unity_Mat4x4=\n{m_cameraTransform_Unity_Mat4x4.determinant}");

            // 2. Set the Camera.main.transform.


            // Debug.Log($"Quaternion = m_cameraTransform_Unity_Mat4x4.rotation=  \n {m_cameraTransform_Unity_Mat4x4.rotation}");
            // Debug.Log($"QuaternionFromMatrix(MatForUnityCameraFrameMat4x4)\n{DanbiComputeShaderHelper.QuaternionFromMatrix(m_cameraTransform_Unity_Mat4x4)}");

            // Camera.main.gameObject.transform.position = new Vector3(0.0f, m_cameraExternalData.projectorPosition.z * 0.001f, 0.0f); // m_cameraHeight -> cm
            m_unityCamPos   = DanbiComputeShaderHelper.GetPosition(m_cameraTransform_Unity_Mat4x4);
            m_unityCamPos.x = 0.0f;
            m_unityCamPos.z = 0.0f;
            SetRenderingCameraTransformation();
        }
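        // The change of basis above hinges on DanbiCameraExternalData.UnityToOpenCVMat.
        // A plausible shape for it (an assumption for illustration): Unity is y-up and
        // OpenCV is y-down, so flipping the y axis yields the determinant -1 improper
        // rotation described in the comment above.
        static readonly float3x3 UnityToOpenCVMat_Sketch = new float3x3(
            1f,  0f, 0f,
            0f, -1f, 0f,
            0f,  0f, 1f);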
Example 7
        public void CreateCameraBuffers(DanbiComputeShader shaderControl)
        {
            shaderControl.bufferDict.AddBuffer_NoDuplicate("_CameraInternalData", DanbiComputeShaderHelper.CreateComputeBuffer_Ret(m_cameraInternalData.asStruct, m_cameraInternalData.stride));
        }
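        // Usage sketch for the buffer registered above: bind it to a kernel before dispatch.
        // The ComputeShader reference and kernel index are assumptions for illustration,
        // not names taken from this project:
        void BindCameraBuffer_Sketch(DanbiComputeShader shaderControl, ComputeShader cs, int kernelIndex)
        {
            var buffer = shaderControl.bufferDict["_CameraInternalData"];
            cs.SetBuffer(kernelIndex, "_CameraInternalData", buffer);
        }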