/// <summary>
        /// Raises the webcam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

            InitializeCalibraton(webCamTextureMat);

            // If the webcam is front-facing, flip the Mat horizontally.
            webCamTextureToMatHelper.flipHorizontal = webCamTextureToMatHelper.IsFrontFacing();
        }
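
The calibration setup itself is delegated to InitializeCalibraton, which is not part of this listing. In these OpenCVForUnity examples the initialized handler typically also allocates the display Texture2D from the helper's Mat (Example No. 4 below does exactly this). A minimal, hypothetical sketch of that step, with placeholder field names:

        // Hypothetical sketch: size a display texture to the helper's Mat.
        // "texture" and "previewRenderer" are placeholders, not fields of the original example.
        Texture2D texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGB24, false);
        previewRenderer.material.mainTexture = texture;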
Example No. 2
        /// <summary>
        /// Raises the change camera button click event.
        /// </summary>
        public void OnChangeCameraButtonClick()
        {
            if (isImagesInputMode)
            {
                return;
            }

            webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.IsFrontFacing();
        }
Example No. 3
        /// <summary>
        /// Raises the change camera button click event.
        /// </summary>
        public void OnChangeCameraButtonClick()
        {
            webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.IsFrontFacing();
        }
Example No. 4
        /// <summary>
        /// Raises the web cam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat grayMat = webCamTextureToMatHelper.GetMat();

            float rawFrameWidth  = grayMat.width();
            float rawFrameHeight = grayMat.height();

            if (enableDownScale)
            {
                downScaleMat    = imageOptimizationHelper.GetDownScaleMat(grayMat);
                DOWNSCALE_RATIO = imageOptimizationHelper.downscaleRatio;
            }
            else
            {
                downScaleMat    = grayMat;
                DOWNSCALE_RATIO = 1.0f;
            }
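
            // Note: the principal point (cx, cy) is divided by DOWNSCALE_RATIO further below
            // because marker detection runs on downScaleMat rather than on the full-resolution frame.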

            float width  = downScaleMat.width();
            float height = downScaleMat.height();

            texture = new Texture2D((int)width, (int)height, TextureFormat.RGB24, false);
            previewQuad.GetComponent <MeshRenderer>().material.mainTexture = texture;
            previewQuad.transform.localScale = new Vector3(0.2f * width / height, 0.2f, 1);
            previewQuad.SetActive(displayCameraPreview);


            //Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);


            DebugUtils.AddDebugStr(webCamTextureToMatHelper.GetWidth() + " x " + webCamTextureToMatHelper.GetHeight() + " : " + webCamTextureToMatHelper.GetFPS());
            if (enableDownScale)
            {
                DebugUtils.AddDebugStr("enableDownScale = true: " + DOWNSCALE_RATIO + " / " + width + " x " + height);
            }


            // create camera matrix and dist coeffs.
            string loadDirectoryPath               = Path.Combine(Application.persistentDataPath, "HoloLensArUcoCameraCalibrationExample");
            string calibrationDirectoryName         = "camera_parameters" + rawFrameWidth + "x" + rawFrameHeight;
            string loadCalibrationFileDirectoryPath = Path.Combine(loadDirectoryPath, calibrationDirectoryName);
            string loadPath = Path.Combine(loadCalibrationFileDirectoryPath, calibrationDirectoryName + ".xml");

            if (useStoredCameraParameters && File.Exists(loadPath))
            {
                // If camera parameters stored by HoloLensArUcoCameraCalibrationExample exist, use them.

                CameraParameters param;
                XmlSerializer    serializer = new XmlSerializer(typeof(CameraParameters));
                using (var stream = new FileStream(loadPath, FileMode.Open))
                {
                    param = (CameraParameters)serializer.Deserialize(stream);
                }

                double fx = param.camera_matrix[0];
                double fy = param.camera_matrix[4];
                double cx = param.camera_matrix[2];
                double cy = param.camera_matrix[5];

                camMatrix  = CreateCameraMatrix(fx, fy, cx / DOWNSCALE_RATIO, cy / DOWNSCALE_RATIO);
                distCoeffs = new MatOfDouble(param.GetDistortionCoefficients());

                Debug.Log("Loaded CameraParameters from a stored XML file.");
                Debug.Log("loadPath: " + loadPath);

                DebugUtils.AddDebugStr("Loaded CameraParameters from a stored XML file.");
                DebugUtils.AddDebugStr("loadPath: " + loadPath);
            }
            else
            {
                if (useStoredCameraParameters && !File.Exists(loadPath))
                {
                    DebugUtils.AddDebugStr("The CameraParameters XML file (" + loadPath + ") does not exist.");
                }

#if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
                CameraIntrinsics cameraIntrinsics = webCamTextureToMatHelper.GetCameraIntrinsics();

                camMatrix  = CreateCameraMatrix(cameraIntrinsics.FocalLengthX, cameraIntrinsics.FocalLengthY, cameraIntrinsics.PrincipalPointX / DOWNSCALE_RATIO, cameraIntrinsics.PrincipalPointY / DOWNSCALE_RATIO);
                distCoeffs = new MatOfDouble(cameraIntrinsics.RadialDistK1, cameraIntrinsics.RadialDistK2, cameraIntrinsics.RadialDistK3, cameraIntrinsics.TangentialDistP1, cameraIntrinsics.TangentialDistP2);

                Debug.Log("Created CameraParameters from VideoMediaFrame.CameraIntrinsics on device.");

                DebugUtils.AddDebugStr("Created CameraParameters from VideoMediaFrame.CameraIntrinsics on device.");
#else
                // Camera matrix values for the HoloLens camera at 896x504 resolution.
                // For details on the camera matrix, see http://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html
                // These values are unique to my device, obtained from the "Windows.Media.Devices.Core.CameraIntrinsics" class. (https://docs.microsoft.com/en-us/uwp/api/windows.media.devices.core.cameraintrinsics)
                // These values can be obtained with this helper script: https://github.com/EnoxSoftware/HoloLensWithOpenCVForUnityExample/tree/master/Assets/HololensCameraIntrinsicsChecker/CameraIntrinsicsCheckerHelper
                double fx          = 1035.149;   //focal length x.
                double fy          = 1034.633;   //focal length y.
                double cx          = 404.9134;   //principal point x.
                double cy          = 236.2834;   //principal point y.
                double distCoeffs1 = 0.2036923;  //radial distortion coefficient k1.
                double distCoeffs2 = -0.2035773; //radial distortion coefficient k2.
                double distCoeffs3 = 0.0;        //tangential distortion coefficient p1.
                double distCoeffs4 = 0.0;        //tangential distortion coefficient p2.
                double distCoeffs5 = -0.2388065; //radial distortion coefficient k3.

                camMatrix  = CreateCameraMatrix(fx, fy, cx / DOWNSCALE_RATIO, cy / DOWNSCALE_RATIO);
                distCoeffs = new MatOfDouble(distCoeffs1, distCoeffs2, distCoeffs3, distCoeffs4, distCoeffs5);

                Debug.Log("Created a dummy CameraParameters (896x504).");

                DebugUtils.AddDebugStr("Created a dummy CameraParameters (896x504).");
#endif
            }
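
            // The 3x3 matrix returned by CreateCameraMatrix(fx, fy, cx, cy) is assumed to follow
            // the standard OpenCV pinhole model (see the sketch after this method):
            //     | fx   0  cx |
            //     |  0  fy  cy |
            //     |  0   0   1 |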

            Debug.Log("camMatrix " + camMatrix.dump());
            Debug.Log("distCoeffs " + distCoeffs.dump());

            //DebugUtils.AddDebugStr("camMatrix " + camMatrix.dump());
            //DebugUtils.AddDebugStr("distCoeffs " + distCoeffs.dump());


            // Compute camera characteristics (FOV, focal length, principal point, aspect ratio) from the camera matrix.
            Size     imageSize      = new Size(width, height);
            double   apertureWidth  = 0;
            double   apertureHeight = 0;
            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);
            double[] aspectratio    = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx[0]);
            Debug.Log("fovy " + fovy[0]);
            Debug.Log("focalLength " + focalLength[0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio[0]);

            // Lower the near clip plane so objects very close to the camera are still rendered.
            arCamera.nearClipPlane = 0.01f;

            ids             = new Mat();
            corners         = new List <Mat>();
            rejectedCorners = new List <Mat>();
            rvecs           = new Mat();
            tvecs           = new Mat();
            rotMat          = new Mat(3, 3, CvType.CV_64FC1);


            transformationM = new Matrix4x4();

            invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
            Debug.Log("invertYM " + invertYM.ToString());

            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
            Debug.Log("invertZM " + invertZM.ToString());

            detectorParams = DetectorParameters.create();
            dictionary     = Aruco.getPredefinedDictionary(Aruco.DICT_6X6_250);
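
            // DICT_6X6_250 is a predefined ArUco dictionary of 250 markers, each 6x6 bits.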


            // If the webcam is front-facing, flip the Mat horizontally.
            webCamTextureToMatHelper.flipHorizontal = webCamTextureToMatHelper.IsFrontFacing();

            rgbMat4preview = new Mat();
        }
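
Two members that the handler above depends on are not shown in this listing: the CreateCameraMatrix helper and the CameraParameters class read by XmlSerializer. The sketch below is one plausible shape for both, inferred only from how they are used above (camera_matrix indices 0/2/4/5, GetDistortionCoefficients(), and a 3x3 double-precision matrix); the field names, namespace, and stored XML layout are assumptions, not the example's actual implementation.

using OpenCVForUnity.CoreModule; // namespace differs in older OpenCVForUnity versions ("OpenCVForUnity")

// Hypothetical container matching the deserialization above; the real stored XML layout may differ.
public class CameraParameters
{
    // Row-major 3x3 camera matrix: [fx, 0, cx, 0, fy, cy, 0, 0, 1].
    public double[] camera_matrix;

    // Distortion coefficients in OpenCV order: k1, k2, p1, p2, k3.
    public double[] distortion_coefficients;

    public double[] GetDistortionCoefficients()
    {
        return distortion_coefficients;
    }
}

public static class CameraMatrixSketch
{
    // Builds the standard OpenCV pinhole camera matrix:
    //     | fx   0  cx |
    //     |  0  fy  cy |
    //     |  0   0   1 |
    public static Mat CreateCameraMatrix(double fx, double fy, double cx, double cy)
    {
        Mat camMatrix = new Mat(3, 3, CvType.CV_64FC1);
        camMatrix.put(0, 0, fx);
        camMatrix.put(0, 1, 0);
        camMatrix.put(0, 2, cx);
        camMatrix.put(1, 0, 0);
        camMatrix.put(1, 1, fy);
        camMatrix.put(1, 2, cy);
        camMatrix.put(2, 0, 0);
        camMatrix.put(2, 1, 0);
        camMatrix.put(2, 2, 1.0);
        return camMatrix;
    }
}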