Example #1
        /// <summary>
        /// converts rotation vector to rotation matrix using Rodrigues transformation
        /// </summary>
        /// <param name="vector">Input rotation vector (3x1).</param>
        /// <param name="matrix">Output rotation matrix (3x3).</param>
        /// <param name="jacobian">Optional output Jacobian matrix, 3x9, which is a matrix of partial derivatives of the output array components with respect to the input array components.</param>
        public static void Rodrigues(double[] vector, out double[,] matrix, out double[,] jacobian)
        {
            if (vector == null)
                throw new ArgumentNullException("vector");
            if (vector.Length != 3)
                throw new ArgumentException("vector.Length != 3");

            using (var vectorM = new Mat(3, 1, MatType.CV_64FC1, vector))
            using (var matrixM = new MatOfDouble())
            using (var jacobianM = new MatOfDouble())
            {
                NativeMethods.calib3d_Rodrigues_VecToMat(vectorM.CvPtr, matrixM.CvPtr, jacobianM.CvPtr);
                matrix = matrixM.ToRectangularArray();
                jacobian = jacobianM.ToRectangularArray();
            }
        }
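A minimal usage sketch for the overload above (CalibExtensions is a hypothetical name for the static class that hosts these wrappers):

        // Axis-angle vector for a 90-degree rotation about the Z axis.
        double[] rvec = { 0, 0, Math.PI / 2 };
        CalibExtensions.Rodrigues(rvec, out double[,] R, out double[,] jacobian);
        // R is now approximately [[0, -1, 0], [1, 0, 0], [0, 0, 1]].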
Example #2
        /// <summary>
        /// converts rotation matrix to rotation vector using Rodrigues transformation
        /// </summary>
        /// <param name="matrix">Input rotation matrix (3x3).</param>
        /// <param name="vector">Output rotation vector (3x1).</param>
        /// <param name="jacobian">Optional output Jacobian matrix, 3x9, which is a matrix of partial derivatives of the output array components with respect to the input array components.</param>
        public static void Rodrigues(double[,] matrix, out double[] vector, out double[,] jacobian)
        {
            if (matrix == null)
                throw new ArgumentNullException("matrix");
            if (matrix.GetLength(0) != 3 || matrix.GetLength(1) != 3)
                throw new ArgumentException("matrix must be double[3,3]");

            using (var matrixM = new Mat(3, 3, MatType.CV_64FC1, matrix))
            using (var vectorM = new MatOfDouble())
            using (var jacobianM = new MatOfDouble())
            {
                NativeMethods.calib3d_Rodrigues_MatToVec(matrixM.CvPtr, vectorM.CvPtr, jacobianM.CvPtr);
                vector = vectorM.ToArray();
                jacobian = jacobianM.ToRectangularArray();
            }
        }
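The matrix overload inverts the first one, so the two round-trip (same hypothetical CalibExtensions host class):

        // Convert the 3x3 rotation matrix from the previous sketch back to an axis-angle vector.
        CalibExtensions.Rodrigues(R, out double[] rvecBack, out double[,] jacobianBack);
        // rvecBack is approximately { 0, 0, Math.PI / 2 }.

Example #3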
				/// <summary>
				/// Init this instance.
				/// </summary>
				private IEnumerator init ()
				{
						if (webCamTexture != null) {
								webCamTexture.Stop ();
								initDone = false;
				
								rgbaMat.Dispose ();
						}

						// Checks how many and which cameras are available on the device
						for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
								if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {
										Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

										webCamDevice = WebCamTexture.devices [cameraIndex];
										webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

										break;
								}
						}

						if (webCamTexture == null) {
								webCamDevice = WebCamTexture.devices [0];
								webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
						}
			
						Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

						// Starts the camera
						webCamTexture.Play ();

						while (true) {
								//If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
								#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
				if (webCamTexture.width > 16 && webCamTexture.height > 16) {
								#else
								if (webCamTexture.didUpdateThisFrame) {
										#endif
										Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
										Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing);

										colors = new Color32[webCamTexture.width * webCamTexture.height];
										rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
										texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

										gameObject.transform.localEulerAngles = new Vector3 (0, 0, 0);
//										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

										gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

//										bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
//										float scaleX = 1;
//										float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
//										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

										gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

										Camera.main.orthographicSize = webCamTexture.height / 2;
										//set cameraparam
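										// Pinhole approximation (a rough guess, not a real calibration): use the
										// longer image side as the focal length in pixels (fx = fy = max_d) and
										// put the principal point at the image center.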
										int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
										camMatrix = new Mat (3, 3, CvType.CV_64FC1);
										camMatrix.put (0, 0, max_d);
										camMatrix.put (0, 1, 0);
										camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
										camMatrix.put (1, 0, 0);
										camMatrix.put (1, 1, max_d);
										camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
										camMatrix.put (2, 0, 0);
										camMatrix.put (2, 1, 0);
										camMatrix.put (2, 2, 1.0f);
										Debug.Log ("camMatrix " + camMatrix.dump ());

										distCoeffs = new MatOfDouble (0, 0, 0, 0);
										Debug.Log ("distCoeffs " + distCoeffs.dump ());

										//calibration camera
										Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
										double apertureWidth = 0;
										double apertureHeight = 0;
										double[] fovx = new double[1];
										double[] fovy = new double[1];
										double[] focalLength = new double[1];
										Point principalPoint = new Point ();
										double[] aspectratio = new double[1];


										Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

										Debug.Log ("imageSize " + imageSize.ToString ());
										Debug.Log ("apertureWidth " + apertureWidth);
										Debug.Log ("apertureHeight " + apertureHeight);
										Debug.Log ("fovx " + fovx [0]);
										Debug.Log ("fovy " + fovy [0]);
										Debug.Log ("focalLength " + focalLength [0]);
										Debug.Log ("principalPoint " + principalPoint.ToString ());
										Debug.Log ("aspectratio " + aspectratio [0]);

										//Adjust Unity Camera FOV
										for (int i = 0; i < ARCamera.Length; i++) {
												ARCamera [i].fieldOfView = (float)fovy [0];
										}

										markerDetector = new MarkerDetector (camMatrix, distCoeffs);

										//Marker Coordinate Initial Matrix
										lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
										Debug.Log ("lookAt " + lookAtM.ToString ());

										//OpenGL to Unity Coordinate System Convert Matrix
										//per http://docs.unity3d.com/ScriptReference/Camera-worldToCameraMatrix.html, camera space matches the OpenGL convention: the camera's forward is the negative Z axis. This is different from Unity's convention, where forward is the positive Z axis.
										invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
										Debug.Log ("invertZM " + invertZM.ToString ());

										initDone = true;

										break;
								} else {
										yield return 0;
								}
						}
				}
		
				// Update is called once per frame
				void Update ()
				{
						if (!initDone)
								return;

						#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
				if (webCamTexture.width > 16 && webCamTexture.height > 16) {
						#else
						if (webCamTexture.didUpdateThisFrame) {
								#endif

								Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

								//flip to correct direction.
								if (webCamTexture.videoVerticallyMirrored) {
										if (webCamDevice.isFrontFacing) {
												if (webCamTexture.videoRotationAngle == 0) {
														Core.flip (rgbaMat, rgbaMat, -1);
												} else if (webCamTexture.videoRotationAngle == 180) {
														Core.flip (rgbaMat, rgbaMat, 0);
												}
										} else {
												if (webCamTexture.videoRotationAngle == 0) {
									
												} else if (webCamTexture.videoRotationAngle == 180) {
														Core.flip (rgbaMat, rgbaMat, 1);
												}
										}
								} else {
										if (webCamDevice.isFrontFacing) {
												if (webCamTexture.videoRotationAngle == 0) {
														Core.flip (rgbaMat, rgbaMat, 1);
												} else if (webCamTexture.videoRotationAngle == 180) {
														Core.flip (rgbaMat, rgbaMat, 0);
												}
										} else {
												if (webCamTexture.videoRotationAngle == 0) {
									
												} else if (webCamTexture.videoRotationAngle == 180) {
														Core.flip (rgbaMat, rgbaMat, -1);
												}
										}
								}
				
								markerDetector.processFrame (rgbaMat, 1);

								//Debug.Log ("markerDetector.getTransformations ().Count " + markerDetector.getTransformations ().Count);


								for (int i = 0; i < ARCamera.Length; i++) {
										ARCamera [i].gameObject.SetActive (false);
								}

								int markerCount = markerDetector.getTransformations ().Count;
								for (int i = 0; i < markerCount; i++) {
										if (i > ARCamera.Length - 1)
												break;
									
										ARCamera [i].gameObject.SetActive (true);

										//Marker to Camera Coordinate System Convert Matrix
										transformationM = markerDetector.getTransformations () [i];
										//Debug.Log ("transformationM " + transformationM.ToString ());
				
										worldToCameraM = lookAtM * transformationM * invertZM;
										//Debug.Log ("worldToCameraM " + worldToCameraM.ToString ());
				
										ARCamera [i].worldToCameraMatrix = worldToCameraM;
								}

								Utils.matToTexture2D (rgbaMat, texture, colors);

								gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
						}
			
				}
		
				void OnDisable ()
				{
						webCamTexture.Stop ();
				}

				/// <summary>
				/// Gets the look at matrix.
				/// </summary>
				/// <returns>The look at matrix.</returns>
				/// <param name="pos">Position.</param>
				/// <param name="target">Target.</param>
				/// <param name="up">Up.</param>
				private Matrix4x4 getLookAtMatrix (Vector3 pos, Vector3 target, Vector3 up)
				{
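						// Standard right-handed look-at view matrix: the rows hold the camera's
						// x (right), y (up) and z (back) axes, and the last column holds
						// -dot(pos, axis) so that the camera position maps to the origin.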
			
						Vector3 z = Vector3.Normalize (pos - target);
						Vector3 x = Vector3.Normalize (Vector3.Cross (up, z));
						Vector3 y = Vector3.Normalize (Vector3.Cross (z, x));
			
						Matrix4x4 result = new Matrix4x4 ();
						result.SetRow (0, new Vector4 (x.x, x.y, x.z, -(Vector3.Dot (pos, x))));
						result.SetRow (1, new Vector4 (y.x, y.y, y.z, -(Vector3.Dot (pos, y))));
						result.SetRow (2, new Vector4 (z.x, z.y, z.z, -(Vector3.Dot (pos, z))));
						result.SetRow (3, new Vector4 (0, 0, 0, 1));
			
						return result;
				}

				void OnGUI ()
				{
						float screenScale = Screen.height / 240.0f;
						Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
						GUI.matrix = scaledMatrix;
			
			
						GUILayout.BeginVertical ();
			
						if (GUILayout.Button ("back")) {
								Application.LoadLevel ("MarkerBasedARSample");
						}
						if (GUILayout.Button ("change camera")) {
								isFrontFacing = !isFrontFacing;
								StartCoroutine (init ());
						}
			
						GUILayout.EndVertical ();
				}
		}
Example #4
        // Use this for initialization
        void Start()
        {
            Mat rgbMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

            Utils.texture2DToMat(imgTexture, rgbMat);
            Debug.Log("rgbMat ToString " + rgbMat.ToString());


            gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = rgbMat.width();
            float height = rgbMat.height();

            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }


            // set cameraparam.
            int    max_d     = (int)Mathf.Max(width, height);
            double fx        = max_d;
            double fy        = max_d;
            double cx        = width / 2.0f;
            double cy        = height / 2.0f;
            Mat    camMatrix = new Mat(3, 3, CvType.CV_64FC1);

            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());


            MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);

            Debug.Log("distCoeffs " + distCoeffs.dump());


            // calibration camera.
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);


            // Reconcile the FOV conventions of OpenCV and Unity: calibrationMatrixValues
            // assumes a centered principal point, so rescale its reported FOV to the
            // FOV actually implied by (cx, cy).
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            Debug.Log("fovXScale " + fovXScale);
            Debug.Log("fovYScale " + fovYScale);


            // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
            if (widthScale < heightScale)
            {
                ARCamera.fieldOfView = (float)(fovx [0] * fovXScale);
            }
            else
            {
                ARCamera.fieldOfView = (float)(fovy [0] * fovYScale);
            }



            Mat        ids      = new Mat();
            List <Mat> corners  = new List <Mat> ();
            List <Mat> rejected = new List <Mat> ();
            Mat        rvecs    = new Mat();
            Mat        tvecs    = new Mat();
            Mat        rotMat   = new Mat(3, 3, CvType.CV_64FC1);

            DetectorParameters detectorParams = DetectorParameters.create();
            Dictionary         dictionary     = Aruco.getPredefinedDictionary(dictionaryId);


            // detect markers.
            Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected);

            // estimate pose.
            if (applyEstimationPose && ids.total() > 0)
            {
                Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
            }

            if (ids.total() > 0)
            {
                Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(255, 0, 0));

                if (applyEstimationPose)
                {
                    for (int i = 0; i < ids.total(); i++)
                    {
//                        Debug.Log ("ids.dump() " + ids.dump ());

                        Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvecs, tvecs, markerLength * 0.5f);

                        // This example displays the ARObject on the first detected marker only.
                        if (i == 0)
                        {
                            // position
                            double[] tvec = tvecs.get(i, 0);

                            // rotation
                            double[] rv   = rvecs.get(i, 0);
                            Mat      rvec = new Mat(3, 1, CvType.CV_64FC1);
                            rvec.put(0, 0, rv[0]);
                            rvec.put(1, 0, rv[1]);
                            rvec.put(2, 0, rv[2]);
                            Calib3d.Rodrigues(rvec, rotMat);

                            Matrix4x4 transformationM = new Matrix4x4();  // from OpenCV
                            transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec [0]));
                            transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec [1]));
                            transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)tvec [2]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
                            Debug.Log("transformationM " + transformationM.ToString());

                            Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
                            Debug.Log("invertZM " + invertZM.ToString());

                            Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
                            Debug.Log("invertYM " + invertYM.ToString());

                            // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                            Matrix4x4 ARM = invertYM * transformationM;

                            // Apply Z axis inverted matrix.
                            ARM = ARM * invertZM;

                            if (shouldMoveARCamera)
                            {
                                ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;

                                Debug.Log("ARM " + ARM.ToString());

                                ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                            }
                            else
                            {
                                ARM = ARCamera.transform.localToWorldMatrix * ARM;

                                Debug.Log("ARM " + ARM.ToString());

                                ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                            }
                        }
                    }
                }
            }

            if (showRejected && rejected.Count > 0)
            {
                Aruco.drawDetectedMarkers(rgbMat, rejected, new Mat(), new Scalar(0, 0, 255));
            }


            Texture2D texture = new Texture2D(rgbMat.cols(), rgbMat.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(rgbMat, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
        }
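ARUtils.SetTransformFromMatrix is not shown above; a plausible sketch (an assumption for illustration, OpenCVForUnity ships its own implementation) decomposes the 4x4 TRS matrix into transform components:

        public static void SetTransformFromMatrix(Transform transform, ref Matrix4x4 matrix)
        {
            // Translation sits in the fourth column.
            transform.localPosition = matrix.GetColumn(3);
            // Rebuild the rotation from the forward (Z) and up (Y) basis vectors.
            transform.localRotation = Quaternion.LookRotation(matrix.GetColumn(2), matrix.GetColumn(1));
            // Scale is the length of each basis column.
            transform.localScale = new Vector3(matrix.GetColumn(0).magnitude,
                                               matrix.GetColumn(1).magnitude,
                                               matrix.GetColumn(2).magnitude);
        }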
Example #5
        /// <summary>
        /// The main program entry point
        /// </summary>
        /// <param name="args">The command line arguments</param>
        static void Main(string[] args)
        {
            // set up the Dlib face detector and shape predictor
            using (var fd = Dlib.GetFrontalFaceDetector())
                using (var sp = ShapePredictor.Deserialize("shape_predictor_68_face_landmarks.dat"))
                {
                    // load input image
                    var img = Dlib.LoadImage <RgbPixel>(inputFilePath);

                    // find all faces in the image
                    var faces = fd.Operator(img);
                    foreach (var face in faces)
                    {
                        // find the landmark points for this face
                        var shape = sp.Detect(img, face);

                        // build the 3d face model
                        var model = Utility.GetFaceModel();

                        // get the six landmark points we need
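                        // (indices follow the 68-point iBUG annotation: 30 = nose tip,
                        // 8 = chin, 36/45 = outer eye corners, 48/54 = mouth corners)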
                        var landmarks = new MatOfPoint2d(1, 6,
                                                         (from i in new int[] { 30, 8, 36, 45, 48, 54 }
                                                          let pt = shape.GetPart((uint)i)
                                                                   select new OpenCvSharp.Point2d(pt.X, pt.Y)).ToArray());

                        // build the camera matrix
                        var cameraMatrix = Utility.GetCameraMatrix((int)img.Rect.Width, (int)img.Rect.Height);

                        // build the coefficient matrix
                        var coeffs = new MatOfDouble(4, 1);
                        coeffs.SetTo(0);

                        // find head rotation and translation
                        Mat rotation    = new MatOfDouble();
                        Mat translation = new MatOfDouble();
                        Cv2.SolvePnP(model, landmarks, cameraMatrix, coeffs, rotation, translation);

                        // find euler angles
                        var euler = Utility.GetEulerMatrix(rotation);

                        // calculate head rotation in degrees
                        var yaw   = 180 * euler.At <double>(0, 2) / Math.PI;
                        var pitch = 180 * euler.At <double>(0, 1) / Math.PI;
                        var roll  = 180 * euler.At <double>(0, 0) / Math.PI;

                        // looking straight ahead wraps at -180/180, so make the range smooth
                        pitch = Math.Sign(pitch) * 180 - pitch;

                        // calculate if the driver is facing forward
                        // the left/right angle must be in the -25..25 range
                        // the up/down angle must be in the -10..10 range
                        var facingForward =
                            yaw >= -25 && yaw <= 25 &&
                            pitch >= -10 && pitch <= 10;

                        // create a new model point in front of the nose, and project it into 2d
                        var poseModel      = new MatOfPoint3d(1, 1, new Point3d(0, 0, 1000));
                        var poseProjection = new MatOfPoint2d();
                        Cv2.ProjectPoints(poseModel, rotation, translation, cameraMatrix, coeffs, poseProjection);

                        // draw the key landmark points in yellow on the image
                        foreach (var i in new int[] { 30, 8, 36, 45, 48, 54 })
                        {
                            var point = shape.GetPart((uint)i);
                            var rect  = new Rectangle(point);
                            Dlib.DrawRectangle(img, rect, color: new RgbPixel(255, 255, 0), thickness: 4);
                        }

                        // draw a line from the tip of the nose pointing in the direction of head pose
                        var landmark = landmarks.At <Point2d>(0);
                        var p        = poseProjection.At <Point2d>(0);
                        Dlib.DrawLine(
                            img,
                            new DlibDotNet.Point((int)landmark.X, (int)landmark.Y),
                            new DlibDotNet.Point((int)p.X, (int)p.Y),
                            color: new RgbPixel(0, 255, 255));

                        // draw a box around the face if it's facing forward
                        if (facingForward)
                        {
                            Dlib.DrawRectangle(img, face, color: new RgbPixel(0, 255, 255), thickness: 4);
                        }
                    }

                    // export the modified image
                    Dlib.SaveJpeg(img, "output.jpg");
                }
        }
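Utility.GetCameraMatrix is also not shown; a plausible pinhole-model sketch (an assumption, not the actual helper) approximates the focal length with the image width and centers the principal point:

        static Mat GetCameraMatrix(int width, int height)
        {
            // fx = fy = image width is a common rough guess when the camera is uncalibrated.
            double fx = width;
            double cx = width / 2.0;
            double cy = height / 2.0;
            return new Mat(3, 3, MatType.CV_64FC1, new double[]
            {
                fx, 0, cx,
                0, fx, cy,
                0,  0,  1
            });
        }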
Example #6
				/// <summary>
				/// Raises the web cam texture to mat helper inited event.
				/// </summary>
				public void OnWebCamTextureToMatHelperInited ()
				{
						Debug.Log ("OnWebCamTextureToMatHelperInited");
			
						Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();
			
						colors = new Color32[webCamTextureMat.cols () * webCamTextureMat.rows ()];
						texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);

						gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
			
						Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
			
						float width = gameObject.transform.localScale.x;
						float height = gameObject.transform.localScale.y;

						float imageScale = 1.0f;
						float widthScale = (float)Screen.width / width;
						float heightScale = (float)Screen.height / height;
						if (widthScale < heightScale) {
								Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
								imageScale = (float)Screen.height / (float)Screen.width;
						} else {
								Camera.main.orthographicSize = height / 2;
						}
			
						gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

						grayMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);
									
						cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
						if (cascade.empty ()) {
								Debug.LogError ("cascade file is not loaded. Please copy it from “FaceTrackerSample/StreamingAssets/” to the “Assets/StreamingAssets/” folder.");
						}

						int max_d = Mathf.Max (webCamTextureMat.rows (), webCamTextureMat.cols ());
						camMatrix = new Mat (3, 3, CvType.CV_64FC1);
						camMatrix.put (0, 0, max_d);
						camMatrix.put (0, 1, 0);
						camMatrix.put (0, 2, webCamTextureMat.cols () / 2.0f);
						camMatrix.put (1, 0, 0);
						camMatrix.put (1, 1, max_d);
						camMatrix.put (1, 2, webCamTextureMat.rows () / 2.0f);
						camMatrix.put (2, 0, 0);
						camMatrix.put (2, 1, 0);
						camMatrix.put (2, 2, 1.0f);
									
						Size imageSize = new Size (webCamTextureMat.cols () * imageScale, webCamTextureMat.rows () * imageScale);
						double apertureWidth = 0;
						double apertureHeight = 0;
						double[] fovx = new double[1];
						double[] fovy = new double[1];
						double[] focalLength = new double[1];
						Point principalPoint = new Point ();
						double[] aspectratio = new double[1];

						Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
									
						Debug.Log ("imageSize " + imageSize.ToString ());
						Debug.Log ("apertureWidth " + apertureWidth);
						Debug.Log ("apertureHeight " + apertureHeight);
						Debug.Log ("fovx " + fovx [0]);
						Debug.Log ("fovy " + fovy [0]);
						Debug.Log ("focalLength " + focalLength [0]);
						Debug.Log ("principalPoint " + principalPoint.ToString ());
						Debug.Log ("aspectratio " + aspectratio [0]);

						if (Screen.height > Screen.width) {
								ARCamera.fieldOfView = (float)fovx [0];
						} else {
								ARCamera.fieldOfView = (float)fovy [0];
						}

						Debug.Log ("camMatrix " + camMatrix.dump ());

						distCoeffs = new MatOfDouble (0, 0, 0, 0);
						Debug.Log ("distCoeffs " + distCoeffs.dump ());

						lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
						Debug.Log ("lookAt " + lookAtM.ToString ());

						invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));

						axes.SetActive (false);
						head.SetActive (false);
						rightEye.SetActive (false);
						leftEye.SetActive (false);
						mouth.SetActive (false);
				}
Example #7
        /// <summary>
        /// Decomposes the projection matrix into a camera matrix, a rotation matrix and a translation vector
        /// </summary>
        /// <param name="projMatrix">3x4 input projection matrix P.</param>
        /// <param name="cameraMatrix">Output 3x3 camera matrix K.</param>
        /// <param name="rotMatrix">Output 3x3 external rotation matrix R.</param>
        /// <param name="transVect">Output 4x1 translation vector T.</param>
        /// <param name="rotMatrixX">Optional 3x3 rotation matrix around x-axis.</param>
        /// <param name="rotMatrixY">Optional 3x3 rotation matrix around y-axis.</param>
        /// <param name="rotMatrixZ">Optional 3x3 rotation matrix around z-axis.</param>
        /// <param name="eulerAngles">Optional three-element vector containing three Euler angles of rotation in degrees.</param>
        public static void DecomposeProjectionMatrix(double[,] projMatrix,
                                                     out double[,] cameraMatrix,
                                                     out double[,] rotMatrix,
                                                     out double[] transVect,
                                                     out double[,] rotMatrixX,
                                                     out double[,] rotMatrixY,
                                                     out double[,] rotMatrixZ,
                                                     out double[] eulerAngles)
        {
            if (projMatrix == null)
                throw new ArgumentNullException("projMatrix");
            int dim0 = projMatrix.GetLength(0);
            int dim1 = projMatrix.GetLength(1);
            if (!((dim0 == 3 && dim1 == 4) || (dim0 == 4 && dim1 == 3)))
                throw new ArgumentException("projMatrix must be double[3,4] or double[4,3]");

            using (var projMatrixM = new Mat(3, 4, MatType.CV_64FC1, projMatrix))
            using (var cameraMatrixM = new MatOfDouble())
            using (var rotMatrixM = new MatOfDouble())
            using (var transVectM = new MatOfDouble())
            using (var rotMatrixXM = new MatOfDouble())
            using (var rotMatrixYM = new MatOfDouble())
            using (var rotMatrixZM = new MatOfDouble())
            using (var eulerAnglesM = new MatOfDouble())
            {
                NativeMethods.calib3d_decomposeProjectionMatrix_Mat(
                    projMatrixM.CvPtr, 
                    cameraMatrixM.CvPtr, rotMatrixM.CvPtr, transVectM.CvPtr,
                    rotMatrixXM.CvPtr, rotMatrixYM.CvPtr, rotMatrixZM.CvPtr, 
                    eulerAnglesM.CvPtr);

                cameraMatrix = cameraMatrixM.ToRectangularArray();
                rotMatrix = rotMatrixM.ToRectangularArray();
                transVect = transVectM.ToArray();
                rotMatrixX = rotMatrixXM.ToRectangularArray();
                rotMatrixY = rotMatrixYM.ToRectangularArray();
                rotMatrixZ = rotMatrixZM.ToRectangularArray();
                eulerAngles = eulerAnglesM.ToArray();
            }
        }
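A minimal usage sketch (calling the wrapper above from the same class), decomposing a projection matrix with focal length 1000, principal point (320, 240), identity rotation and zero translation:

            var projMatrix = new double[3, 4]
            {
                { 1000, 0, 320, 0 },
                { 0, 1000, 240, 0 },
                { 0,    0,   1, 0 }
            };
            DecomposeProjectionMatrix(projMatrix,
                out var cameraMatrix, out var rotMatrix, out var transVect,
                out var rotMatrixX, out var rotMatrixY, out var rotMatrixZ,
                out var eulerAngles);
            // cameraMatrix recovers the intrinsics, rotMatrix is the identity,
            // and transVect is the homogeneous camera position (0, 0, 0, 1).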
Example #8
        public void OnFrameMatAcquired(Mat grayMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix, CameraIntrinsics cameraIntrinsics)
        {
            isDetectingInFrameArrivedThread = true;

            DebugUtils.VideoTick();

            Mat   downScaleMat = null;
            float DOWNSCALE_RATIO;

            if (enableDownScale)
            {
                downScaleMat    = imageOptimizationHelper.GetDownScaleMat(grayMat);
                DOWNSCALE_RATIO = imageOptimizationHelper.downscaleRatio;
            }
            else
            {
                downScaleMat    = grayMat;
                DOWNSCALE_RATIO = 1.0f;
            }

            Mat         camMatrix  = null;
            MatOfDouble distCoeffs = null;

            if (useStoredCameraParameters)
            {
                camMatrix  = this.camMatrix;
                distCoeffs = this.distCoeffs;
            }
            else
            {
                camMatrix  = CreateCameraMatrix(cameraIntrinsics.FocalLengthX, cameraIntrinsics.FocalLengthY, cameraIntrinsics.PrincipalPointX / DOWNSCALE_RATIO, cameraIntrinsics.PrincipalPointY / DOWNSCALE_RATIO);
                distCoeffs = new MatOfDouble(cameraIntrinsics.RadialDistK1, cameraIntrinsics.RadialDistK2, cameraIntrinsics.RadialDistK3, cameraIntrinsics.TangentialDistP1, cameraIntrinsics.TangentialDistP2);
            }

            if (enableDetection)
            {
                // Detect markers and estimate Pose
                Aruco.detectMarkers(downScaleMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

                if (applyEstimationPose && ids.total() > 0)
                {
                    Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                    for (int i = 0; i < ids.total(); i++)
                    {
                        // This example displays the ARObject on the first detected marker only.
                        if (i == 0)
                        {
                            // Convert to unity pose data.
                            double[] rvecArr = new double[3];
                            rvecs.get(0, 0, rvecArr);
                            double[] tvecArr = new double[3];
                            tvecs.get(0, 0, tvecArr);
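                            // The full-resolution focal length was applied to the downscaled image,
                            // so the estimated depth comes out too large by roughly DOWNSCALE_RATIO;
                            // divide Z to compensate.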
                            tvecArr[2] /= DOWNSCALE_RATIO;
                            PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                            // Create transform matrix.
                            transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

                            lock (sync)
                            {
                                // Right-handed coordinates system (OpenCV) to left-handed one (Unity)
                                ARM = invertYM * transformationM;

                                // Apply Z-axis inverted matrix.
                                ARM = ARM * invertZM;
                            }

                            hasUpdatedARTransformMatrix = true;

                            break;
                        }
                    }
                }
            }

            Mat rgbMat4preview = null;

            if (displayCameraPreview)
            {
                rgbMat4preview = new Mat();
                Imgproc.cvtColor(downScaleMat, rgbMat4preview, Imgproc.COLOR_GRAY2RGB);

                if (ids.total() > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat4preview, corners, ids, new Scalar(0, 255, 0));

                    if (applyEstimationPose)
                    {
                        for (int i = 0; i < ids.total(); i++)
                        {
                            using (Mat rvec = new Mat(rvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                                using (Mat tvec = new Mat(tvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                                {
                                    // We are processing an RGB image while OpenCV's drawing functions assume BGR, so the axis-color correspondences are X: blue, Y: green, Z: red (usually X: red, Y: green, Z: blue).
                                    Calib3d.drawFrameAxes(rgbMat4preview, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                                }
                        }
                    }
                }
            }

            DebugUtils.TrackTick();

            Enqueue(() =>
            {
                if (!webCamTextureToMatHelper.IsPlaying())
                {
                    return;
                }

                if (displayCameraPreview && rgbMat4preview != null)
                {
                    Utils.fastMatToTexture2D(rgbMat4preview, texture);
                    rgbMat4preview.Dispose();
                }

                if (applyEstimationPose)
                {
                    if (hasUpdatedARTransformMatrix)
                    {
                        hasUpdatedARTransformMatrix = false;

                        lock (sync)
                        {
                            // Apply camera transform matrix.
                            ARM = cameraToWorldMatrix * invertZM * ARM;

                            if (enableLerpFilter)
                            {
                                arGameObject.SetMatrix4x4(ARM);
                            }
                            else
                            {
                                ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
                            }
                        }
                    }
                }

                grayMat.Dispose();
            });

            isDetectingInFrameArrivedThread = false;
        }
Example #9
        // Use this for initialization
        void Run()
        {
            // If true, the error log of the native OpenCV side is displayed on the Unity Editor console.
            Utils.setDebugMode(true);

            Mat img = Imgcodecs.imread(image_filepath);

            if (img.empty())
            {
                Debug.LogError(image_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
                img = new Mat(368, 368, CvType.CV_8UC3, new Scalar(0, 0, 0));
            }


            //Adjust Quad.transform.localScale.
            gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float imageWidth  = img.width();
            float imageHeight = img.height();

            float widthScale  = (float)Screen.width / imageWidth;
            float heightScale = (float)Screen.height / imageHeight;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = imageHeight / 2;
            }


            Net net = null;

            if (string.IsNullOrEmpty(caffemodel_filepath) || string.IsNullOrEmpty(prototxt_filepath))
            {
                Debug.LogError(caffemodel_filepath + " or " + prototxt_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
            }
            else
            {
                net = Dnn.readNet(prototxt_filepath, caffemodel_filepath);
            }

            if (net == null)
            {
                Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
            else
            {
                float frameWidth  = img.cols();
                float frameHeight = img.rows();

                Mat input = Dnn.blobFromImage(img, inScale, new Size(inWidth, inHeight), new Scalar(0, 0, 0), false, false);

                net.setInput(input);

                //TickMeter tm = new TickMeter ();
                //tm.start ();

                Mat output = net.forward();

                //tm.stop ();
                //Debug.Log ("Inference time, ms: " + tm.getTimeMilli ());

                //Debug.Log("output.size(0) " + output.size(0));
                //Debug.Log("output.size(1) " + output.size(1));
                //Debug.Log("output.size(2) " + output.size(2));
                //Debug.Log("output.size(3) " + output.size(3));

                float[] data = new float[output.size(2) * output.size(3)];

                output = output.reshape(1, output.size(1));
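                // The blob is now reshaped to one row per channel, i.e. one flattened
                // 46x46 heat map per body part.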

                List <Point> points = new List <Point>();
                for (int i = 0; i < BODY_PARTS.Count; i++)
                {
                    output.get(i, 0, data);

                    Mat heatMap = new Mat(1, data.Length, CvType.CV_32FC1);
                    heatMap.put(0, 0, data);


                    //Originally we would try to find all the local maxima. To keep the sample
                    //simple we find only the global one; as a result, only a single pose
                    //can be detected this way.
                    Core.MinMaxLocResult result = Core.minMaxLoc(heatMap);

                    heatMap.Dispose();


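                    // The heat map was flattened to a single row, so the flat index of the
                    // maximum encodes column (index % 46) and row (index / 46); scale both
                    // back to the original frame size.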
                    double x = (frameWidth * (result.maxLoc.x % 46)) / 46;
                    double y = (frameHeight * (result.maxLoc.x / 46)) / 46;

                    if (result.maxVal > 0.1)
                    {
                        points.Add(new Point(x, y));
                    }
                    else
                    {
                        points.Add(null);
                    }
                }

                for (int i = 0; i < POSE_PAIRS.GetLength(0); i++)
                {
                    string partFrom = POSE_PAIRS[i, 0];
                    string partTo   = POSE_PAIRS[i, 1];

                    int idFrom = BODY_PARTS[partFrom];
                    int idTo   = BODY_PARTS[partTo];

                    if (points[idFrom] != null && points[idTo] != null)
                    {
                        Imgproc.line(img, points[idFrom], points[idTo], new Scalar(0, 255, 0), 3);
                        Imgproc.ellipse(img, points[idFrom], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
                        Imgproc.ellipse(img, points[idTo], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
                    }
                }

                MatOfDouble timings = new MatOfDouble();
                long        t       = net.getPerfProfile(timings);
                Debug.Log("t: " + t);
                Debug.Log("timings.dump(): " + timings.dump());

                double freq = Core.getTickFrequency() / 1000;
                Debug.Log("freq: " + freq);

                Imgproc.putText(img, (t / freq) + "ms", new Point(10, img.height() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.6, new Scalar(0, 0, 255), 2);
            }

            Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);


            Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(img, texture);

            gameObject.GetComponent <Renderer>().material.mainTexture = texture;


            Utils.setDebugMode(false);
        }
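Example #10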
        private IEnumerator init()
        {
                        axes.SetActive (false);
                        head.SetActive (false);
                        rightEye.SetActive (false);
                        leftEye.SetActive (false);
                        mouth.SetActive (false);

                        if (webCamTexture != null) {
                                faceTracker.reset ();

                                webCamTexture.Stop ();
                                initDone = false;

                                rgbaMat.Dispose ();
                                grayMat.Dispose ();
                                cascade.Dispose ();
                                camMatrix.Dispose ();
                                distCoeffs.Dispose ();

                        }

                        // Checks how many and which cameras are available on the device
                        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {

                                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {

                                        Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                                        webCamDevice = WebCamTexture.devices [cameraIndex];

                                        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

                                        break;
                                }
                        }

                        if (webCamTexture == null) {
                                webCamDevice = WebCamTexture.devices [0];
                                webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
                        }

                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

                        // Starts the camera
                        webCamTexture.Play ();

                        while (true) {
                                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                                #else
                                if (webCamTexture.didUpdateThisFrame) {
                                        #endif
                                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                                        Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing);

                                        colors = new Color32[webCamTexture.width * webCamTexture.height];

                                        rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
                                        grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

                                        texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                                        cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
                                        if (cascade.empty ()) {
                                                Debug.LogError ("cascade file is not loaded. Please copy it from “FaceTrackerSample/StreamingAssets/” to the “Assets/StreamingAssets/” folder.");
                                        }

                                        gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

                                        gameObject.transform.localEulerAngles = new Vector3 (0, 0, 0);
            //										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

            //										bool _videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            //										float scaleX = 1;
            //										float scaleY = _videoVerticallyMirrored ? -1.0f : 1.0f;
            //										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

                                        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                                        Camera.main.orthographicSize = webCamTexture.height / 2;

                                        int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
                                        camMatrix = new Mat (3, 3, CvType.CV_64FC1);
                                        camMatrix.put (0, 0, max_d);
                                        camMatrix.put (0, 1, 0);
                                        camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
                                        camMatrix.put (1, 0, 0);
                                        camMatrix.put (1, 1, max_d);
                                        camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
                                        camMatrix.put (2, 0, 0);
                                        camMatrix.put (2, 1, 0);
                                        camMatrix.put (2, 2, 1.0f);

                                        Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
                                        double apertureWidth = 0;
                                        double apertureHeight = 0;
                                        double[] fovx = new double[1];
                                        double[] fovy = new double[1];
                                        double[] focalLength = new double[1];
                                        Point principalPoint = new Point ();
                                        double[] aspectratio = new double[1];

                                        Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

                                        Debug.Log ("imageSize " + imageSize.ToString ());
                                        Debug.Log ("apertureWidth " + apertureWidth);
                                        Debug.Log ("apertureHeight " + apertureHeight);
                                        Debug.Log ("fovx " + fovx [0]);
                                        Debug.Log ("fovy " + fovy [0]);
                                        Debug.Log ("focalLength " + focalLength [0]);
                                        Debug.Log ("principalPoint " + principalPoint.ToString ());
                                        Debug.Log ("aspectratio " + aspectratio [0]);

                                        ARCamera.fieldOfView = (float)fovy [0];

                                        Debug.Log ("camMatrix " + camMatrix.dump ());

                                        distCoeffs = new MatOfDouble (0, 0, 0, 0);
                                        Debug.Log ("distCoeffs " + distCoeffs.dump ());

                                        lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
                                        Debug.Log ("lookAt " + lookAtM.ToString ());

                                        invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));

                                        initDone = true;

                                        break;
                                } else {
                                        yield return 0;
                                }
                        }
                }

                // Update is called once per frame
                void Update ()
                {
                        if (!initDone)
                                return;

                        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                        #else
                        if (webCamTexture.didUpdateThisFrame) {
                                #endif

                                Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

                                //flip to correct direction.
                                if (webCamTexture.videoVerticallyMirrored) {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                } else if (webCamTexture.videoRotationAngle == 180) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 0) {

                                                } else if (webCamTexture.videoRotationAngle == 180) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                }
                                        }
                                } else {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                } else if (webCamTexture.videoRotationAngle == 180) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 0) {

                                                } else if (webCamTexture.videoRotationAngle == 180) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                }
                                        }
                                }
                                //convert image to greyscale
                                Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                                if (faceTracker.getPoints ().Count <= 0) {
                                        Debug.Log ("detectFace");

                                        //detect faces on the histogram-equalized grayscale image
                                        using (Mat equalizeHistMat = new Mat ())
                                        using (MatOfRect faces = new MatOfRect ()) {

                                                Imgproc.equalizeHist (grayMat, equalizeHistMat);

                                                cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0
                                                        | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                                        | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());

                                                if (faces.rows () > 0) {
                                                        Debug.Log ("faces " + faces.dump ());
                                                        //add initial face points from MatOfRect
                                                        faceTracker.addPoints (faces);

                                                        //draw face rect
                                                        OpenCVForUnity.Rect[] rects = faces.toArray ();
                                                        for (int i = 0; i < rects.Length; i++) {
                                                                Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
                                                        }
                                                }

                                        }

                                }

                                //track face points. If there are no face points, track () always returns false.
                                if (faceTracker.track (grayMat, faceTrackerParams)) {
                                        if (isDrawPoints)
                                                faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));

                                        Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

                                        Point[] points = faceTracker.getPoints () [0];

                                        if (points.Length > 0) {

                                                //for (int i = 0; i < points.Length; i++) {
                                                //        Core.putText (rgbaMat, "" + i, new Point (points [i].x, points [i].y), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar (0, 0, 255, 255), 2, Core.LINE_AA, false);
                                                //}

                                                imagePoints.fromArray (
                                                        points [31], //l eye
                                                        points [36], //r eye
                                                        points [67], //nose
                                                        points [48], //l mouth
                                                        points [54]  //r mouth
                                                        //, points [1],  //l ear
                                                        //  points [13] //r ear
                                                );

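                                                // Estimate the head pose from the 2D-3D correspondences
                                                // (objectPoints are the model's 3D landmarks, imagePoints
                                                // their detected 2D positions).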
                                                Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                                                bool isRefresh = false;

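                                                // Accept the new pose only when the estimated depth is positive
                                                // and within a plausible range (scaled by the actual capture width).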
                                                if (tvec.get (2, 0) [0] > 0 && tvec.get (2, 0) [0] < 1200 * ((float)webCamTexture.width / (float)width)) {

                                                        isRefresh = true;

                                                        if (oldRvec == null) {
                                                                oldRvec = new Mat ();
                                                                rvec.copyTo (oldRvec);
                                                        }
                                                        if (oldTvec == null) {
                                                                oldTvec = new Mat ();
                                                                tvec.copyTo (oldTvec);
                                                        }

                                                        //filter Rvec Noise.
                                                        using (Mat absDiffRvec = new Mat ()) {
                                                                Core.absdiff (rvec, oldRvec, absDiffRvec);

                                                                //				Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                                                                using (Mat cmpRvec = new Mat ()) {
                                                                        Core.compare (absDiffRvec, new Scalar (rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);

                                                                        if (Core.countNonZero (cmpRvec) > 0)
                                                                                isRefresh = false;
                                                                }
                                                        }

                                                        //filter Tvec Noise.
                                                        using (Mat absDiffTvec = new Mat ()) {
                                                                Core.absdiff (tvec, oldTvec, absDiffTvec);

                                                                //				Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                                                                using (Mat cmpTvec = new Mat ()) {
                                                                        Core.compare (absDiffTvec, new Scalar (tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);

                                                                        if (Core.countNonZero (cmpTvec) > 0)
                                                                                isRefresh = false;
                                                                }
                                                        }

                                                }

                                                if (isRefresh) {

                                                        if (!rightEye.activeSelf)
                                                                rightEye.SetActive (true);
                                                        if (!leftEye.activeSelf)
                                                                leftEye.SetActive (true);

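                                                        // Heuristic mouth-open test: compares lip landmark spans
                                                        // (48/56 horizontally, 51/57 vertically) against the
                                                        // inter-eye span (31/36).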
                                                        if ((Mathf.Abs ((float)(points [48].x - points [56].x)) < Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.2
                                                                && Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.9)
                                                                || Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.7) {

                                                                if (!mouth.activeSelf)
                                                                        mouth.SetActive (true);

                                                        } else {
                                                                if (mouth.activeSelf)
                                                                        mouth.SetActive (false);
                                                        }

                                                        rvec.copyTo (oldRvec);
                                                        tvec.copyTo (oldTvec);

                                                        Calib3d.Rodrigues (rvec, rotM);

                                                        transformationM.SetRow (0, new Vector4 ((float)rotM.get (0, 0) [0], (float)rotM.get (0, 1) [0], (float)rotM.get (0, 2) [0], (float)tvec.get (0, 0) [0]));
                                                        transformationM.SetRow (1, new Vector4 ((float)rotM.get (1, 0) [0], (float)rotM.get (1, 1) [0], (float)rotM.get (1, 2) [0], (float)tvec.get (1, 0) [0]));
                                                        transformationM.SetRow (2, new Vector4 ((float)rotM.get (2, 0) [0], (float)rotM.get (2, 1) [0], (float)rotM.get (2, 2) [0], (float)tvec.get (2, 0) [0]));
                                                        transformationM.SetRow (3, new Vector4 (0, 0, 0, 1));

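                                                        // lookAtM maps marker coordinates to the initial camera frame,
                                                        // and invertZM flips Z to convert the OpenGL-style view matrix
                                                        // to Unity's convention.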
                                                        modelViewMtrx = lookAtM * transformationM * invertZM;

                                                        ARCamera.worldToCameraMatrix = modelViewMtrx;

                                                        //				Debug.Log ("modelViewMtrx " + modelViewMtrx.ToString());
                                                }
                                        }
                                }

                                Utils.matToTexture2D (rgbaMat, texture, colors);

                        }

                        if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
                                faceTracker.reset ();
                                if (oldRvec != null) {
                                        oldRvec.Dispose ();
                                        oldRvec = null;
                                }
                                if (oldTvec != null) {
                                        oldTvec.Dispose ();
                                        oldTvec = null;
                                }

                                ARCamera.ResetWorldToCameraMatrix ();

                                rightEye.SetActive (false);
                                leftEye.SetActive (false);
                                mouth.SetActive (false);
                        }

                }

                void OnDisable ()
                {
                        webCamTexture.Stop ();
                }

                private Matrix4x4 getLookAtMatrix (Vector3 pos, Vector3 target, Vector3 up)
                {

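                        // Build a right-handed look-at view matrix: the rows are the camera
                        // basis vectors (x, y, z), with translation -dot(pos, axis) in the last column.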
                        Vector3 z = Vector3.Normalize (pos - target);
                        Vector3 x = Vector3.Normalize (Vector3.Cross (up, z));
                        Vector3 y = Vector3.Normalize (Vector3.Cross (z, x));

                        Matrix4x4 result = new Matrix4x4 ();
                        result.SetRow (0, new Vector4 (x.x, x.y, x.z, -(Vector3.Dot (pos, x))));
                        result.SetRow (1, new Vector4 (y.x, y.y, y.z, -(Vector3.Dot (pos, y))));
                        result.SetRow (2, new Vector4 (z.x, z.y, z.z, -(Vector3.Dot (pos, z))));
                        result.SetRow (3, new Vector4 (0, 0, 0, 1));

                        return result;
                }

                void OnGUI ()
                {
                        float screenScale = Screen.height / 240.0f;
                        Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
                        GUI.matrix = scaledMatrix;

                        GUILayout.BeginVertical ();
                        if (GUILayout.Button ("back")) {
                                Application.LoadLevel ("FaceTrackerSample");
                        }
                        if (GUILayout.Button ("change camera")) {
                                isFrontFacing = !isFrontFacing;
                                StartCoroutine (init ());
                        }

                        if (GUILayout.Button ("drawPoints")) {
                                if (isDrawPoints) {
                                        isDrawPoints = false;
                                } else {
                                        isDrawPoints = true;
                                }
                        }
                        if (GUILayout.Button ("axes")) {
                                if (axes.activeSelf) {
                                        axes.SetActive (false);
                                } else {
                                        axes.SetActive (true);
                                }
                        }
                        if (GUILayout.Button ("head")) {
                                if (head.activeSelf) {
                                        head.SetActive (false);
                                } else {
                                        head.SetActive (true);
                                }
                        }

                        GUILayout.EndVertical ();
                }

            }
Example #11
0
        public List <ZOArucoTrackerDetection> DetectMarkers(Mat rgbMat)
        {
            List <ZOArucoTrackerDetection> results = new List <ZOArucoTrackerDetection>();

            // Debug.Log("imgMat dst ToString " + rgbMat.ToString());

            float width          = rgbMat.width();
            float height         = rgbMat.height();
            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                // Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }
            else
            {
                // Camera.main.orthographicSize = height / 2;
            }

            // set camera parameters.
            int    max_d     = (int)Mathf.Max(width, height);
            double fx        = max_d;
            double fy        = max_d;
            double cx        = width / 2.0f;
            double cy        = height / 2.0f;
            Mat    camMatrix = new Mat(3, 3, CvType.CV_64FC1);

            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            // Debug.Log("camMatrix " + camMatrix.dump());


            MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);
            // Debug.Log("distCoeffs " + distCoeffs.dump());


            // calibration camera matrix values.
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            // Debug.Log("imageSize " + imageSize.ToString());
            // Debug.Log("apertureWidth " + apertureWidth);
            // Debug.Log("apertureHeight " + apertureHeight);
            // Debug.Log("fovx " + fovx[0]);
            // Debug.Log("fovy " + fovy[0]);
            // Debug.Log("focalLength " + focalLength[0]);
            // Debug.Log("principalPoint " + principalPoint.ToString());
            // Debug.Log("aspectratio " + aspectratio[0]);


            // To convert the difference of the FOV value of the OpenCV and Unity.
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            // Debug.Log("fovXScale " + fovXScale);
            // Debug.Log("fovYScale " + fovYScale);
            Mat        ids             = new Mat();
            List <Mat> corners         = new List <Mat>();
            List <Mat> rejectedCorners = new List <Mat>();
            Mat        rvecs           = new Mat();
            Mat        tvecs           = new Mat();
            Mat        rotMat          = new Mat(3, 3, CvType.CV_64FC1);

            DetectorParameters detectorParams = DetectorParameters.create();
            Dictionary         dictionary     = Aruco.getPredefinedDictionary((int)dictionaryId);


            // detect markers.
            Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

            // Debug.Log("INFO: Number of markers detected: " + ids.total());
            // if at least one marker detected
            if (ids.total() > 0)
            {
                if (_debug)
                {
                    Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(0, 255, 0));
                }


                // estimate pose.
                Aruco.estimatePoseSingleMarkers(corners, _markerLengthMeters, camMatrix, distCoeffs, rvecs, tvecs);

                for (int i = 0; i < ids.total(); i++)
                {
                    // Get translation vector
                    double[] tvecArr = tvecs.get(i, 0);

                    // Get rotation vector
                    double[] rvecArr = rvecs.get(i, 0);
                    Mat      rvec    = new Mat(3, 1, CvType.CV_64FC1);
                    rvec.put(0, 0, rvecArr);

                    // Convert rotation vector to rotation matrix.
                    Calib3d.Rodrigues(rvec, rotMat);
                    double[] rotMatArr = new double[rotMat.total()];
                    rotMat.get(0, 0, rotMatArr);

                    // Convert OpenCV camera extrinsic parameters to Unity Matrix4x4.
                    Matrix4x4 transformationM = new Matrix4x4(); // from OpenCV
                    transformationM.SetRow(0, new Vector4((float)rotMatArr[0], (float)rotMatArr[1], (float)rotMatArr[2], (float)tvecArr[0]));
                    transformationM.SetRow(1, new Vector4((float)rotMatArr[3], (float)rotMatArr[4], (float)rotMatArr[5], (float)tvecArr[1]));
                    transformationM.SetRow(2, new Vector4((float)rotMatArr[6], (float)rotMatArr[7], (float)rotMatArr[8], (float)tvecArr[2]));
                    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
                    // Debug.Log("transformationM " + transformationM.ToString());

                    ZOArucoTrackerDetection detection = new ZOArucoTrackerDetection();
                    int [] currentId = new int[1];
                    // ids.get(0, i, currentId);
                    ids.get(i, 0, currentId);
                    detection.arucoId   = currentId[0];
                    detection.transform = transformationM;
                    results.Add(detection);
                }
            }

            return results;
        }
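        // Usage sketch (hypothetical, not from the original source): run the
        // detector on an RGB frame and log each marker pose. "frameRgbMat" is an
        // illustrative name; ZOArucoTrackerDetection carries the marker id and
        // the Unity-space Matrix4x4 built above.
        public void LogDetections(Mat frameRgbMat)
        {
            List<ZOArucoTrackerDetection> detections = DetectMarkers(frameRgbMat);
            foreach (ZOArucoTrackerDetection detection in detections)
            {
                Debug.Log("marker " + detection.arucoId + " transform\n" + detection.transform);
            }
        }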
Example #12
0
        /// <summary>
        /// computes the connected components labeled image of boolean image. 
        /// image with 4 or 8 way connectivity - returns N, the total number of labels [0, N-1] where 0 
        /// represents the background label. ltype specifies the output label image type, an important 
        /// consideration based on the total number of labels or alternatively the total number of 
        /// pixels in the source image.
        /// </summary>
        /// <param name="image">the image to be labeled</param>
        /// <param name="connectivity">8 or 4 for 8-way or 4-way connectivity respectively</param>
        /// <returns>The connected components result (labels, per-blob statistics, and label count).</returns>
        public static ConnectedComponents ConnectedComponentsEx(
            InputArray image, PixelConnectivity connectivity = PixelConnectivity.Connectivity8)
        {
            using (var labelsMat = new MatOfInt())
            using (var statsMat = new MatOfInt())
            using (var centroidsMat = new MatOfDouble())
            {
                int nLabels = ConnectedComponentsWithStats(
                    image, labelsMat, statsMat, centroidsMat, connectivity, MatType.CV_32S);
                var labels = labelsMat.ToRectangularArray();
                var stats = statsMat.ToRectangularArray();
                var centroids = centroidsMat.ToRectangularArray();

                var blobs = new ConnectedComponents.Blob[nLabels];
                for (int i = 0; i < nLabels; i++)
                {
                    blobs[i] = new ConnectedComponents.Blob
                    {
                        Label = i,
                        Left = stats[i, 0],
                        Top = stats[i, 1],
                        Width = stats[i, 2],
                        Height = stats[i, 3],
                        Area = stats[i, 4],
                        Centroid = new Point2d(centroids[i, 0], centroids[i, 1]),
                    };
                }
                return new ConnectedComponents(blobs, labels, nLabels);
            }
        }
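        // Usage sketch (hypothetical, not from the original source): label a
        // binarized CV_8UC1 image with the helper above and print per-blob
        // statistics. Assumes ConnectedComponents exposes the blobs passed to
        // its constructor as a "Blobs" collection.
        public static void PrintBlobStats(InputArray binaryImage)
        {
            ConnectedComponents cc = ConnectedComponentsEx(binaryImage, PixelConnectivity.Connectivity8);
            foreach (ConnectedComponents.Blob blob in cc.Blobs)
            {
                Console.WriteLine("label {0}: area={1} bbox=({2},{3},{4},{5})",
                    blob.Label, blob.Area, blob.Left, blob.Top, blob.Width, blob.Height);
            }
        }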
Example #13
0
				/// <summary>
				/// Init this instance.
				/// </summary>
				private IEnumerator init ()
				{
						if (webCamTexture != null) {
								webCamTexture.Stop ();
								initDone = false;
				
								rgbaMat.Dispose ();
						}

						// Checks how many and which cameras are available on the device
						for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
				
				
								if (WebCamTexture.devices [cameraIndex].isFrontFacing == shouldUseFrontFacing) {
					
					
										Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

										webCamDevice = WebCamTexture.devices [cameraIndex];
										
										webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
					
					
										break;
								}
				
				
						}

						if (webCamTexture == null) {
								webCamDevice = WebCamTexture.devices [0];
								webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
						}
			
						Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
			
			
			
						// Starts the camera
						webCamTexture.Play ();

						while (true) {
								//If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
								#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
								if (webCamTexture.width > 16 && webCamTexture.height > 16) {
								#else
								if (webCamTexture.didUpdateThisFrame) {
										#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2                                    
										while (webCamTexture.width <= 16) {
												webCamTexture.GetPixels32 ();
												yield return new WaitForEndOfFrame ();
										} 
										#endif
										#endif
										Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
										Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

					
										colors = new Color32[webCamTexture.width * webCamTexture.height];
					
										rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
					
										texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);
					
										gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

										updateLayout ();


										//set cameraparam
										int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
										camMatrix = new Mat (3, 3, CvType.CV_64FC1);
										camMatrix.put (0, 0, max_d);
										camMatrix.put (0, 1, 0);
										camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
										camMatrix.put (1, 0, 0);
										camMatrix.put (1, 1, max_d);
										camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
										camMatrix.put (2, 0, 0);
										camMatrix.put (2, 1, 0);
										camMatrix.put (2, 2, 1.0f);
										Debug.Log ("camMatrix " + camMatrix.dump ());

										distCoeffs = new MatOfDouble (0, 0, 0, 0);
										Debug.Log ("distCoeffs " + distCoeffs.dump ());

										//calibration camera
										Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
										double apertureWidth = 0;
										double apertureHeight = 0;
										double[] fovx = new double[1];
										double[] fovy = new double[1];
										double[] focalLength = new double[1];
										Point principalPoint = new Point ();
										double[] aspectratio = new double[1];


										Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

										Debug.Log ("imageSize " + imageSize.ToString ());
										Debug.Log ("apertureWidth " + apertureWidth);
										Debug.Log ("apertureHeight " + apertureHeight);
										Debug.Log ("fovx " + fovx [0]);
										Debug.Log ("fovy " + fovy [0]);
										Debug.Log ("focalLength " + focalLength [0]);
										Debug.Log ("principalPoint " + principalPoint.ToString ());
										Debug.Log ("aspectratio " + aspectratio [0]);

										//Adjust Unity Camera FOV
										for (int i = 0; i < ARCamera.Length; i++) {
												ARCamera [i].fieldOfView = (float)fovy [0];
										}
										

										
					
										markerDetector = new MarkerDetector (camMatrix, distCoeffs, markerDesign);


										//Marker Coordinate Initial Matrix
										lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
										Debug.Log ("lookAt " + lookAtM.ToString ());

										//OpenGL to Unity Coordinate System Convert Matrix
										//http://docs.unity3d.com/ScriptReference/Camera-worldToCameraMatrix.html that camera space matches OpenGL convention: camera's forward is the negative Z axis. This is different from Unity's convention, where forward is the positive Z axis.
										invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
										Debug.Log ("invertZM " + invertZM.ToString ());


										screenOrientation = Screen.orientation;
										initDone = true;
					
										break;
								} else {
										yield return 0;
								}
						}
				}
Example #14
0
        // Start the process
        private IEnumerator init()
        {
            rightEye.SetActive (false);
            leftEye.SetActive (false);

            Debug.Log("---------------------------------------------------------------Eye");
            Debug.Log(leftEye.transform.localPosition);
            Debug.Log(rightEye.transform.localPosition);
            Debug.Log("---------------------------------------------------------------Eye");

            if (webCamTexture != null)
            {
                faceTracker.reset();

                webCamTexture.Stop();
                initDone = false;

                rgbaMat.Dispose();
                grayMat.Dispose();
                cascade.Dispose();
                camMatrix.Dispose();
                distCoeffs.Dispose();
            }

            // Check which cameras are available on the device
            for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
            {
                if (WebCamTexture.devices[cameraIndex].isFrontFacing == shouldUseFrontFacing)
                {
                    Debug.Log(cameraIndex + " name " + WebCamTexture.devices[cameraIndex].name + " isFrontFacing " + WebCamTexture.devices[cameraIndex].isFrontFacing);
                    webCamDevice = WebCamTexture.devices[cameraIndex];
                    webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
                    break;
                }
            }

            if (webCamTexture == null)
            {
                webCamDevice = WebCamTexture.devices[0];
                webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            }

            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

            // Start the camera
            webCamTexture.Play();
            while (true)
            {
                // On iOS, if you want to use webCamTexture.width and webCamTexture.height, you have to wait until webCamTexture.didUpdateThisFrame == 1; otherwise these two values will be equal to 16.
                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16)
                {
                #else
                    if (webCamTexture.didUpdateThisFrame)
                    {
                    #if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
                        while (webCamTexture.width <= 16)
                        {
                        webCamTexture.GetPixels32 ();
                        yield return new WaitForEndOfFrame ();
                        }
                    #endif
                #endif
                    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                    Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                    colors = new Color32[webCamTexture.width * webCamTexture.height];

                    rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
                    grayMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

                    texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                    gameObject.GetComponent<Renderer>().material.mainTexture = texture;

                    updateLayout();

                    cascade = new CascadeClassifier(Utils.getFilePath("haarcascade_frontalface_alt.xml"));
                    if (cascade.empty())
                    {
                        Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
                    }

                    int max_d = Mathf.Max(rgbaMat.rows(), rgbaMat.cols());
                    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
                    camMatrix.put(0, 0, max_d);
                    camMatrix.put(0, 1, 0);
                    camMatrix.put(0, 2, rgbaMat.cols() / 2.0f);
                    camMatrix.put(1, 0, 0);
                    camMatrix.put(1, 1, max_d);
                    camMatrix.put(1, 2, rgbaMat.rows() / 2.0f);
                    camMatrix.put(2, 0, 0);
                    camMatrix.put(2, 1, 0);
                    camMatrix.put(2, 2, 1.0f);

                    Size imageSize = new Size(rgbaMat.cols(), rgbaMat.rows());
                    double apertureWidth = 0;
                    double apertureHeight = 0;
                    double[] fovx = new double[1];
                    double[] fovy = new double[1];
                    double[] focalLength = new double[1];
                    Point principalPoint = new Point(); // principal point
                    double[] aspectratio = new double[1];

                    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

                    Debug.Log("imageSize " + imageSize.ToString());
                    Debug.Log("apertureWidth " + apertureWidth);
                    Debug.Log("apertureHeight " + apertureHeight);
                    Debug.Log("fovx " + fovx[0]);
                    Debug.Log("fovy " + fovy[0]);
                    Debug.Log("focalLength " + focalLength[0]);
                    Debug.Log("--------------------------principalPoint");
                    Debug.Log("principalPoint " + principalPoint.ToString());
                    Debug.Log("--------------------------principalPoint");

                    Debug.Log("aspectratio " + aspectratio[0]);

                    ARCamera.fieldOfView = (float)fovy[0];

                    Debug.Log("camMatrix " + camMatrix.dump());

                    distCoeffs = new MatOfDouble(0, 0, 0, 0);
                    Debug.Log("distCoeffs " + distCoeffs.dump());

                    lookAtM = getLookAtMatrix(new Vector3(0, 0, 0), new Vector3(0, 0, 1), new Vector3(0, -1, 0));
                    Debug.Log("lookAt " + lookAtM.ToString());

                    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));

                    screenOrientation = Screen.orientation;
                    initDone = true;
                    break;
                    }
                    else
                    {
                        yield return 0;
                    }
                }
            }
Example #15
0
        private void InitializeCalibraton(Mat frameMat)
        {
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = frameMat.width();
            float height = frameMat.height();

            texture          = new Texture2D(frameMat.cols(), frameMat.rows(), TextureFormat.RGBA32, false);
            texture.wrapMode = TextureWrapMode.Clamp;

            previewQuad.GetComponent <MeshRenderer>().material.mainTexture = texture;
            previewQuad.transform.localScale = new Vector3(0.2f * width / height, 0.2f, 1);

            float imageSizeScale = 1.0f;

            // set cameraparam.
            camMatrix = CreateCameraMatrix(width, height);
            Debug.Log("camMatrix " + camMatrix.dump());

            distCoeffs = new MatOfDouble(0, 0, 0, 0, 0);
            Debug.Log("distCoeffs " + distCoeffs.dump());

            // calibration camera.
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);


            grayMat         = new Mat(frameMat.rows(), frameMat.cols(), CvType.CV_8UC1);
            bgrMat          = new Mat(frameMat.rows(), frameMat.cols(), CvType.CV_8UC3);
            rgbaMat         = new Mat(frameMat.rows(), frameMat.cols(), CvType.CV_8UC4);
            ids             = new Mat();
            corners         = new List <Mat> ();
            rejectedCorners = new List <Mat> ();
            rvecs           = new List <Mat> ();
            tvecs           = new List <Mat> ();

            detectorParams = DetectorParameters.create();
            detectorParams.set_cornerRefinementMethod(1); // do cornerSubPix() of OpenCV.
            dictionary = Aruco.getPredefinedDictionary((int)dictionaryId);

            recoveredIdxs = new Mat();

            charucoCorners = new Mat();
            charucoIds     = new Mat();
            charucoBoard   = CharucoBoard.create((int)squaresX, (int)squaresY, chArUcoBoradSquareLength, chArUcoBoradMarkerLength, dictionary);


            allCorners = new List <List <Mat> > ();
            allIds     = new List <Mat> ();
            allImgs    = new List <Mat> ();

            imagePoints = new List <Mat> ();

            isInitialized = true;
        }
Example #16
0
        private void DetectMarkers()
        {
            Utils.texture2DToMat(imgTexture, rgbMat);
            Debug.Log("imgMat dst ToString " + rgbMat.ToString());

            gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = rgbMat.width();
            float height = rgbMat.height();

            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }


            // set camera parameters.
            int    max_d     = (int)Mathf.Max(width, height);
            double fx        = max_d;
            double fy        = max_d;
            double cx        = width / 2.0f;
            double cy        = height / 2.0f;
            Mat    camMatrix = new Mat(3, 3, CvType.CV_64FC1);

            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());


            MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);

            Debug.Log("distCoeffs " + distCoeffs.dump());


            // calibration camera matrix values.
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);


            // To convert the difference of the FOV value of the OpenCV and Unity.
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            Debug.Log("fovXScale " + fovXScale);
            Debug.Log("fovYScale " + fovYScale);


            // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
            if (widthScale < heightScale)
            {
                arCamera.fieldOfView = (float)(fovx [0] * fovXScale);
            }
            else
            {
                arCamera.fieldOfView = (float)(fovy [0] * fovYScale);
            }
            // Display objects near the camera.
            arCamera.nearClipPlane = 0.01f;



            Mat        ids             = new Mat();
            List <Mat> corners         = new List <Mat> ();
            List <Mat> rejectedCorners = new List <Mat> ();
            Mat        rvecs           = new Mat();
            Mat        tvecs           = new Mat();
            Mat        rotMat          = new Mat(3, 3, CvType.CV_64FC1);

            DetectorParameters detectorParams = DetectorParameters.create();
            Dictionary         dictionary     = Aruco.getPredefinedDictionary((int)dictionaryId);


            // detect markers.
            Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

            // if at least one marker detected
            if (ids.total() > 0)
            {
                Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(0, 255, 0));

                // estimate pose.
                if (applyEstimationPose)
                {
                    Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                    for (int i = 0; i < ids.total(); i++)
                    {
                        using (Mat rvec = new Mat(rvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                            using (Mat tvec = new Mat(tvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1))) {
                                // In this example we are processing with RGB color image, so Axis-color correspondences are X: blue, Y: green, Z: red. (Usually X: red, Y: green, Z: blue)
                                Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                            }

                        // This example can display the ARObject on only first detected marker.
                        if (i == 0)
                        {
                            // Get translation vector
                            double[] tvecArr = tvecs.get(i, 0);

                            // Get rotation vector
                            double[] rvecArr = rvecs.get(i, 0);
                            Mat      rvec    = new Mat(3, 1, CvType.CV_64FC1);
                            rvec.put(0, 0, rvecArr);

                            // Convert rotation vector to rotation matrix.
                            Calib3d.Rodrigues(rvec, rotMat);
                            double[] rotMatArr = new double[rotMat.total()];
                            rotMat.get(0, 0, rotMatArr);

                            // Convert OpenCV camera extrinsic parameters to Unity Matrix4x4.
                            Matrix4x4 transformationM = new Matrix4x4();  // from OpenCV
                            transformationM.SetRow(0, new Vector4((float)rotMatArr [0], (float)rotMatArr [1], (float)rotMatArr [2], (float)tvecArr [0]));
                            transformationM.SetRow(1, new Vector4((float)rotMatArr [3], (float)rotMatArr [4], (float)rotMatArr [5], (float)tvecArr [1]));
                            transformationM.SetRow(2, new Vector4((float)rotMatArr [6], (float)rotMatArr [7], (float)rotMatArr [8], (float)tvecArr [2]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
                            Debug.Log("transformationM " + transformationM.ToString());

                            Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
                            Debug.Log("invertYM " + invertYM.ToString());

                            // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                            // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
                            Matrix4x4 ARM = invertYM * transformationM * invertYM;

                            if (shouldMoveARCamera)
                            {
                                ARM = arGameObject.transform.localToWorldMatrix * ARM.inverse;

                                Debug.Log("ARM " + ARM.ToString());

                                ARUtils.SetTransformFromMatrix(arCamera.transform, ref ARM);
                            }
                            else
                            {
                                ARM = arCamera.transform.localToWorldMatrix * ARM;

                                Debug.Log("ARM " + ARM.ToString());

                                ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
                            }
                        }
                    }
                }
            }

            if (showRejectedCorners && rejectedCorners.Count > 0)
            {
                Aruco.drawDetectedMarkers(rgbMat, rejectedCorners, new Mat(), new Scalar(255, 0, 0));
            }

            Utils.matToTexture2D(rgbMat, texture);
        }
Example #17
0
				// Use this for initialization
				void Start ()
				{

						gameObject.transform.localScale = new Vector3 (imgTexture.width, imgTexture.height, 1);
						Camera.main.orthographicSize = imgTexture.height / 2;

		
						Mat imgMat = new Mat (imgTexture.height, imgTexture.width, CvType.CV_8UC4);
		
						Utils.texture2DToMat (imgTexture, imgMat);
						Debug.Log ("imgMat dst ToString " + imgMat.ToString ());

						//set cameraparam
						int max_d = Mathf.Max (imgMat.rows (), imgMat.cols ());
						Mat camMatrix = new Mat (3, 3, CvType.CV_64FC1);
						camMatrix.put (0, 0, max_d);
						camMatrix.put (0, 1, 0);
						camMatrix.put (0, 2, imgMat.cols () / 2.0f);
						camMatrix.put (1, 0, 0);
						camMatrix.put (1, 1, max_d);
						camMatrix.put (1, 2, imgMat.rows () / 2.0f);
						camMatrix.put (2, 0, 0);
						camMatrix.put (2, 1, 0);
						camMatrix.put (2, 2, 1.0f);
						Debug.Log ("camMatrix " + camMatrix.dump ());

						MatOfDouble distCoeffs = new MatOfDouble (0, 0, 0, 0);
						Debug.Log ("distCoeffs " + distCoeffs.dump ());


						//calibration camera
						Size imageSize = new Size (imgMat.cols (), imgMat.rows ());
						double apertureWidth = 0;
						double apertureHeight = 0;
						double[] fovx = new double[1];
						double[] fovy = new double[1];
						double[] focalLength = new double[1];
						Point principalPoint = new Point ();
						double[] aspectratio = new double[1];
		
						Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
		
						Debug.Log ("imageSize " + imageSize.ToString ());
						Debug.Log ("apertureWidth " + apertureWidth);
						Debug.Log ("apertureHeight " + apertureHeight);
						Debug.Log ("fovx " + fovx [0]);
						Debug.Log ("fovy " + fovy [0]);
						Debug.Log ("focalLength " + focalLength [0]);
						Debug.Log ("principalPoint " + principalPoint.ToString ());
						Debug.Log ("aspectratio " + aspectratio [0]);

						//Adjust Unity Camera FOV
						ARCamera.fieldOfView = (float)fovy [0];

//			ARCamera.projectionMatrix = ARCamera.projectionMatrix * Matrix4x4.Scale(new Vector3(-1, -1, 1));
//			gameObject.transform.localScale = new Vector3 (-1 * gameObject.transform.localScale.x, -1 * gameObject.transform.localScale.y, 1);


		 
						MarkerDetector markerDetector = new MarkerDetector (camMatrix, distCoeffs, markerDesign);

						markerDetector.processFrame (imgMat, 1);


						//Marker Coordinate Initial Matrix
						Matrix4x4 lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
						Debug.Log ("lookAt " + lookAtM.ToString ());

						//Marker to Camera Coordinate System Convert Matrix
						if (markerDetector.getTransformations ().Count > 0) {
								Matrix4x4 transformationM = markerDetector.getTransformations () [0];
								Debug.Log ("transformationM " + transformationM.ToString ());

								//OpenGL to Unity Coordinate System Convert Matrix
								//http://docs.unity3d.com/ScriptReference/Camera-worldToCameraMatrix.html that camera space matches OpenGL convention: camera's forward is the negative Z axis. This is different from Unity's convention, where forward is the positive Z axis. 
								Matrix4x4 invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
								Debug.Log ("invertZM " + invertZM.ToString ());

								Matrix4x4 worldToCameraM = lookAtM * transformationM * invertZM;
								Debug.Log ("worldToCameraM " + worldToCameraM.ToString ());

								ARCamera.worldToCameraMatrix = worldToCameraM;
		
						} else {
								Debug.LogWarning ("Marker is not detected");
						}



						Texture2D texture = new Texture2D (imgMat.cols (), imgMat.rows (), TextureFormat.RGBA32, false);
		
						Utils.matToTexture2D (imgMat, texture);
		
						gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
				}
Example #18
0
        public Mat GetMat()
        {
            if (KernelSize * KernelSize != Values.Count())
            {
                throw new InvalidOperationException("KernelSize^2 must equal Values.Count()");
            }

            var mat = new MatOfDouble(KernelSize, KernelSize, Values);

            Cv2.Normalize(mat, mat, 1, 0, OpenCvSharp.NormType.L1);
            return mat;
        }
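        // Usage sketch (hypothetical, not from the original source): convolve an
        // image with the L1-normalized kernel returned by GetMat(). Because the
        // kernel's absolute values sum to 1, the filtered image keeps its
        // overall brightness.
        public void Apply(Mat src, Mat dst)
        {
            using (var kernel = GetMat())
            {
                // ddepth of -1 keeps the destination depth equal to the source depth.
                Cv2.Filter2D(src, dst, -1, kernel);
            }
        }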
Example #19
0
        /// <summary>
        /// Binarizes by Niblack's method (This is faster but memory-hogging)
        /// </summary>
        /// <param name="src">Input image</param>
        /// <param name="dst">Output image</param>
        /// <param name="kernelSize">Window size</param>
        /// <param name="k">Adequate coefficient</param>
        public static void NiblackFast(Mat src, Mat dst, int kernelSize, double k)
        {
            if (src == null)
                throw new ArgumentNullException("src");
            if (dst == null)
                throw new ArgumentNullException("dst");

            // grayscale only
            if (src.Type() != MatType.CV_8UC1)
                throw new ArgumentException("src must be gray scale image");
            if (dst.Type() != MatType.CV_8UC1)
                throw new ArgumentException("dst must be gray scale image");

            // check the kernel size
            if (kernelSize < 3)
                throw new ArgumentOutOfRangeException("kernelSize", "size must be 3 and above");
            if (kernelSize % 2 == 0)
                throw new ArgumentOutOfRangeException("kernelSize", "size must be odd number");

            int borderSize = kernelSize / 2;
            int width = src.Width;
            int height = src.Height;
            dst.Create(src.Size(), src.Type());

            using (var tempMat = new Mat(height + (borderSize * 2), width + (borderSize * 2), src.Type()))
            using (var sumMat = new Mat(tempMat.Height + 1, tempMat.Width + 1, MatType.CV_64FC1, 1))
            using (var sqSumMat = new Mat(tempMat.Height + 1, tempMat.Width + 1, MatType.CV_64FC1, 1))
            {
                Cv2.CopyMakeBorder(src, tempMat, borderSize, borderSize, borderSize, borderSize, BorderTypes.Replicate, Scalar.All(0));
                Cv2.Integral(tempMat, sumMat, sqSumMat);

                using (var tSrcMat = new MatOfByte(src))
                using (var tDstMat = new MatOfByte(dst))
                using (var tSumMat = new MatOfDouble(sumMat))
                using (var tSqSumMat = new MatOfDouble(sqSumMat))
                {
                    var tSrc = tSrcMat.GetIndexer();
                    var tDst = tDstMat.GetIndexer();
                    var tSum = tSumMat.GetIndexer();
                    var tSqSum = tSqSumMat.GetIndexer();

                    int ylim = height + borderSize;
                    int xlim = width + borderSize;
                    int kernelPixels = kernelSize * kernelSize;
                    for (int y = borderSize; y < ylim; y++)
                    {
                        for (int x = borderSize; x < xlim; x++)
                        {
                            int x1 = x - borderSize;
                            int y1 = y - borderSize;
                            int x2 = x + borderSize + 1;
                            int y2 = y + borderSize + 1;
                            double sum = tSum[y2, x2] - tSum[y2, x1] - tSum[y1, x2] + tSum[y1, x1];
                            double sqsum = tSqSum[y2, x2] - tSqSum[y2, x1] - tSqSum[y1, x2] + tSqSum[y1, x1];
                            double mean = sum / kernelPixels;
                            double var = (sqsum / kernelPixels) - (mean * mean);
                            if (var < 0.0) var = 0.0;
                            double stddev = Math.Sqrt(var);

                            double threshold = mean + k * stddev;
                            if (tSrc[y - borderSize, x - borderSize] < threshold)
                                tDst[y - borderSize, x - borderSize] = 0;
                            else
                                tDst[y - borderSize, x - borderSize] = 255;
                        }
                    }
                }
            }
        }
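        // Usage sketch (hypothetical, not from the original source): binarize a
        // grayscale image with NiblackFast. kernelSize must be odd and >= 3; a
        // coefficient k around -0.2 is a common starting point for Niblack
        // thresholding.
        public static Mat BinarizeNiblack(Mat gray)
        {
            var dst = new Mat(gray.Size(), MatType.CV_8UC1);
            NiblackFast(gray, dst, 51, -0.2);
            return dst;
        }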
Example #20
0
        /// <summary>
        /// composes 2 [R|t] transformations together. Also computes the derivatives of the result w.r.t the arguments
        /// </summary>
        /// <param name="rvec1">First rotation vector.</param>
        /// <param name="tvec1">First translation vector.</param>
        /// <param name="rvec2">Second rotation vector.</param>
        /// <param name="tvec2">Second translation vector.</param>
        /// <param name="rvec3">Output rotation vector of the superposition.</param>
        /// <param name="tvec3">Output translation vector of the superposition.</param>
        /// <param name="dr3dr1">Optional output derivatives of rvec3 or tvec3 with regard to rvec1, rvec2, tvec1 and tvec2, respectively.</param>
        /// <param name="dr3dt1">Optional output derivatives of rvec3 or tvec3 with regard to rvec1, rvec2, tvec1 and tvec2, respectively.</param>
        /// <param name="dr3dr2">Optional output derivatives of rvec3 or tvec3 with regard to rvec1, rvec2, tvec1 and tvec2, respectively.</param>
        /// <param name="dr3dt2">Optional output derivatives of rvec3 or tvec3 with regard to rvec1, rvec2, tvec1 and tvec2, respectively.</param>
        /// <param name="dt3dr1">Optional output derivatives of rvec3 or tvec3 with regard to rvec1, rvec2, tvec1 and tvec2, respectively.</param>
        /// <param name="dt3dt1">Optional output derivatives of rvec3 or tvec3 with regard to rvec1, rvec2, tvec1 and tvec2, respectively.</param>
        /// <param name="dt3dr2">Optional output derivatives of rvec3 or tvec3 with regard to rvec1, rvec2, tvec1 and tvec2, respectively.</param>
        /// <param name="dt3dt2">Optional output derivatives of rvec3 or tvec3 with regard to rvec1, rvec2, tvec1 and tvec2, respectively.</param>
        public static void ComposeRT(double[] rvec1, double[] tvec1,
                                     double[] rvec2, double[] tvec2,
                                     out double[] rvec3, out double[] tvec3,
                                     out double[,] dr3dr1, out double[,] dr3dt1,
                                     out double[,] dr3dr2, out double[,] dr3dt2,
                                     out double[,] dt3dr1, out double[,] dt3dt1,
                                     out double[,] dt3dr2, out double[,] dt3dt2)
        {
            if (rvec1 == null)
                throw new ArgumentNullException("rvec1");
            if (tvec1 == null)
                throw new ArgumentNullException("tvec1");
            if (rvec2 == null)
                throw new ArgumentNullException("rvec2");
            if (tvec2 == null)
                throw new ArgumentNullException("tvec2");

            using (var rvec1M = new Mat(3, 1, MatType.CV_64FC1, rvec1))
            using (var tvec1M = new Mat(3, 1, MatType.CV_64FC1, tvec1))
            using (var rvec2M = new Mat(3, 1, MatType.CV_64FC1, rvec2))
            using (var tvec2M = new Mat(3, 1, MatType.CV_64FC1, tvec2))
            using (var rvec3M = new MatOfDouble())
            using (var tvec3M = new MatOfDouble())
            using (var dr3dr1M = new MatOfDouble())
            using (var dr3dt1M = new MatOfDouble())
            using (var dr3dr2M = new MatOfDouble())
            using (var dr3dt2M = new MatOfDouble())
            using (var dt3dr1M = new MatOfDouble())
            using (var dt3dt1M = new MatOfDouble())
            using (var dt3dr2M = new MatOfDouble())
            using (var dt3dt2M = new MatOfDouble())
            {
                NativeMethods.calib3d_composeRT_Mat(rvec1M.CvPtr, tvec1M.CvPtr, rvec2M.CvPtr, tvec2M.CvPtr,
                                                rvec3M.CvPtr, tvec3M.CvPtr,
                                                dr3dr1M.CvPtr, dr3dt1M.CvPtr, dr3dr2M.CvPtr, dr3dt2M.CvPtr,
                                                dt3dr1M.CvPtr, dt3dt1M.CvPtr, dt3dr2M.CvPtr, dt3dt2M.CvPtr);
                rvec3 = rvec3M.ToArray();
                tvec3 = tvec3M.ToArray();
                dr3dr1 = dr3dr1M.ToRectangularArray();
                dr3dt1 = dr3dt1M.ToRectangularArray();
                dr3dr2 = dr3dr2M.ToRectangularArray();
                dr3dt2 = dr3dt2M.ToRectangularArray();
                dt3dr1 = dt3dr1M.ToRectangularArray();
                dt3dt1 = dt3dt1M.ToRectangularArray();
                dt3dr2 = dt3dr2M.ToRectangularArray();
                dt3dt2 = dt3dt2M.ToRectangularArray();
            }
        }
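
A minimal usage sketch (assuming the wrapper is exposed as Cv2.ComposeRT, as in OpenCvSharp). The expected results follow from rvec3 = rodrigues(R2*R1) and tvec3 = R2*tvec1 + tvec2:

        double[] rvec1 = { 0, 0, Math.PI / 2 };   // 90 degrees about Z
        double[] tvec1 = { 1, 0, 0 };
        double[] rvec2 = { 0, 0, Math.PI / 2 };   // another 90 degrees about Z
        double[] tvec2 = { 0, 1, 0 };
        double[] rvec3, tvec3;
        double[,] dr3dr1, dr3dt1, dr3dr2, dr3dt2, dt3dr1, dt3dt1, dt3dr2, dt3dt2;
        Cv2.ComposeRT(rvec1, tvec1, rvec2, tvec2, out rvec3, out tvec3,
                      out dr3dr1, out dr3dt1, out dr3dr2, out dr3dt2,
                      out dt3dr1, out dt3dt1, out dt3dr2, out dt3dt2);
        // Expected: rvec3 ~ { 0, 0, Math.PI } (180 degrees about Z), tvec3 ~ { 0, 2, 0 }.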
Example #21
    void _ProcessCalibration()
    {
        Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);
        Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

        int imageWidth  = Screen.width;
        int imageHeight = Screen.height;
        Mat cameraMat   = new Mat(new Size(imageWidth, imageHeight), CvType.CV_8UC3);

        Imgproc.resize(rgbMat, cameraMat, cameraMat.size());


        Mat gray = new Mat(imageHeight, imageWidth, CvType.CV_8UC1);

        Imgproc.cvtColor(cameraMat, gray, Imgproc.COLOR_RGB2GRAY);

        Mat grayC3 = new Mat(imageHeight, imageWidth, CvType.CV_8UC3);

        Imgproc.cvtColor(gray, grayC3, Imgproc.COLOR_GRAY2RGB);

        int rectW = (int)(imageHeight * 0.4);
        int rectH = (int)(imageHeight * 0.3);
        var x     = (int)(imageWidth * 0.5 - (rectW / 2));
        var y     = (int)(imageHeight * 0.5 - (rectH / 2));
        var rect  = new OpenCVForUnity.Rect(x, y, rectW, rectH);

        var center    = new Point(imageWidth / 2.0, imageHeight / 2.0);
        var lineColor = new Scalar(255, 153, 153);

        var rotatedRect      = new RotatedRect(center, new Size(rectW, rectH), 0);
        var rotatedSmallRect = new RotatedRect(center, new Size((int)(rectW * 0.7), (int)(rectH * 0.7)), 0);

        Imgproc.ellipse(grayC3, rotatedRect, lineColor, 3);
        Imgproc.ellipse(grayC3, rotatedSmallRect, lineColor, 3);

        //outputScreenQuad.setMat(grayC3);

        if (startProcess)
        {
            var mask = Mat.zeros(imageHeight, imageWidth, CvType.CV_8UC1);
            Imgproc.ellipse(mask, rotatedRect, new Scalar(255), -1);

            var hsvChs   = ARUtil.getHSVChannels(cameraMat);
            var yCrCbChs = ARUtil.getYCrCbChannels(cameraMat);


            foreach (var chStr in new List <string> {
                "s", "v", "cr"
            })
            {
                MatOfDouble meanMat   = new MatOfDouble();
                MatOfDouble stddevMat = new MatOfDouble();
                Mat         chMat     = new Mat();
                if (chStr == "s")
                {
                    chMat = hsvChs[1];
                }
                else if (chStr == "v")
                {
                    chMat = hsvChs[2];
                }
                else
                {
                    chMat = yCrCbChs[1];
                }
                Core.meanStdDev(chMat, meanMat, stddevMat, mask);
                var mean   = meanMat.toList()[0];
                var stddev = stddevMat.toList()[0];

                // 95% confidence interval (mean ± 1.96·stddev)
                if (chStr == "s")
                {
                    s_threshold_lower = mean - stddev * 1.96 - 20;
                    s_threshold_upper = mean + stddev * 1.96 + 20;
                }
                else if (chStr == "v")
                {
                    v_threshold_lower = mean - stddev * 1.96 - 80;
                    v_threshold_upper = mean + stddev * 1.96 + 80;
                }
                else
                {
                    cr_threshold_lower = mean - stddev * 1.96 - 20;
                    cr_threshold_upper = mean + stddev * 1.96 + 20;
                }
            }

            H_sourceMean = (int)(Core.mean(hsvChs[0], mask).val[0]);

            doneSetThreshlod = true;
        }
        else
        {
            outputScreenQuad.setMat(grayC3);
        }
    }
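
The calibration above sets each channel threshold to mean ± 1.96·stddev (a 95% interval under a normality assumption), widened by a fixed per-channel margin. A hypothetical helper capturing that rule (not part of the original class):

        // Hypothetical helper: 95% interval (mean ± 1.96·stddev), widened by a margin.
        static void ConfidenceThreshold(double mean, double stddev, double margin,
                                        out double lower, out double upper)
        {
            double half = stddev * 1.96 + margin;
            lower = mean - half;
            upper = mean + half;
        }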
Example #22
        /// <summary>
        /// Raises the web cam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
            gameObject.GetComponent <Renderer>().material.mainTexture = texture;


            gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);

            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = webCamTextureMat.width();
            float height = webCamTextureMat.height();

            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }


            // Set camera parameters
            int    max_d = (int)Mathf.Max(width, height);
            double fx    = max_d;
            double fy    = max_d;
            double cx    = width / 2.0f;
            double cy    = height / 2.0f;

            camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());

            distCoeffs = new MatOfDouble(0, 0, 0, 0);
            Debug.Log("distCoeffs " + distCoeffs.dump());

            // Compute calibration values from the camera matrix
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];


            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx[0]);
            Debug.Log("fovy " + fovy[0]);
            Debug.Log("focalLength " + focalLength[0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio[0]);

            // Convert the FOV value from the OpenCV convention to the Unity convention.
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            Debug.Log("fovXScale " + fovXScale);
            Debug.Log("fovYScale " + fovYScale);


            // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
            if (widthScale < heightScale)
            {
                ARCamera.fieldOfView = (float)(fovx[0] * fovXScale);
            }
            else
            {
                ARCamera.fieldOfView = (float)(fovy[0] * fovYScale);
            }


            MarkerDesign[] markerDesigns = new MarkerDesign[markerSettings.Length];
            for (int i = 0; i < markerDesigns.Length; i++)
            {
                markerDesigns[i] = markerSettings[i].markerDesign;
            }

            markerDetector = new MarkerDetector(camMatrix, distCoeffs, markerDesigns);


            invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
            Debug.Log("invertYM " + invertYM.ToString());

            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
            Debug.Log("invertZM " + invertZM.ToString());


            // If the web camera is front facing, flip the Mat.
            webCamTextureToMatHelper.flipHorizontal = webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing;
        }
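
A note on the FOV adjustment above: calibrationMatrixValues returns fovy = (atan(cy/fy) + atan((h - cy)/fy)) in degrees, so multiplying by fovYScale cancels the asymmetric terms and the product reduces to the symmetric pinhole FOV that Unity expects (likewise for fovx). A sketch of the equivalent direct computation, reusing the names from the method above:

        // Equivalent direct computation of the Unity vertical FOV, in degrees.
        double unityFovY = 2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy))) * Mathf.Rad2Deg;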
Example #23
        //javadoc: CascadeClassifier::detectMultiScale3(image, objects, rejectLevels, levelWeights, scaleFactor, minNeighbors, flags, minSize)
        public void detectMultiScale3(Mat image, MatOfRect objects, MatOfInt rejectLevels, MatOfDouble levelWeights, double scaleFactor, int minNeighbors, int flags, Size minSize)
        {
            ThrowIfDisposed();
            if (image != null)
            {
                image.ThrowIfDisposed();
            }
            if (objects != null)
            {
                objects.ThrowIfDisposed();
            }
            if (rejectLevels != null)
            {
                rejectLevels.ThrowIfDisposed();
            }
            if (levelWeights != null)
            {
                levelWeights.ThrowIfDisposed();
            }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
            Mat objects_mat      = objects;
            Mat rejectLevels_mat = rejectLevels;
            Mat levelWeights_mat = levelWeights;
            objdetect_CascadeClassifier_detectMultiScale3_12(nativeObj, image.nativeObj, objects_mat.nativeObj, rejectLevels_mat.nativeObj, levelWeights_mat.nativeObj, scaleFactor, minNeighbors, flags, minSize.width, minSize.height);

            return;
#else
            return;
#endif
        }
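
A minimal usage sketch for the wrapper above; the cascade file name, the input Mat "gray", and the parameter values are assumptions for illustration:

        // Hypothetical usage: gray is a CV_8UC1 input Mat.
        CascadeClassifier cascade = new CascadeClassifier("haarcascade_frontalface_alt.xml");
        MatOfRect objects = new MatOfRect();
        MatOfInt rejectLevels = new MatOfInt();
        MatOfDouble levelWeights = new MatOfDouble();
        cascade.detectMultiScale3(gray, objects, rejectLevels, levelWeights, 1.1, 3, 0, new Size(30, 30));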
        /// <summary>
        /// Raises the web cam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
            OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(webCamTextureMat, texture);

            gameObject.GetComponent <Renderer>().material.mainTexture = texture;

            gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("dlib shape predictor", dlibShapePredictorFileName);
                fpsMonitor.Add("width", webCamTextureToMatHelper.GetWidth().ToString());
                fpsMonitor.Add("height", webCamTextureToMatHelper.GetHeight().ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }


            float width  = webCamTextureMat.width();
            float height = webCamTextureMat.height();

            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }


            // Set camera parameters
            int    max_d = (int)Mathf.Max(width, height);
            double fx    = max_d;
            double fy    = max_d;
            double cx    = width / 2.0f;
            double cy    = height / 2.0f;

            camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());


            distCoeffs = new MatOfDouble(0, 0, 0, 0);
            Debug.Log("distCoeffs " + distCoeffs.dump());

            // create AR camera P * V Matrix
            Matrix4x4 P = ARUtils.CalculateProjectionMatrixFromCameraMatrixValues((float)fx, (float)fy, (float)cx, (float)cy, width, height, 0.3f, 2000f);
            Matrix4x4 V = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));

            VP = P * V;

            // Compute calibration values from the camera matrix
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx[0]);
            Debug.Log("fovy " + fovy[0]);
            Debug.Log("focalLength " + focalLength[0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio[0]);


            // Convert the FOV value from the OpenCV convention to the Unity convention.
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            Debug.Log("fovXScale " + fovXScale);
            Debug.Log("fovYScale " + fovYScale);


            //Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
            if (widthScale < heightScale)
            {
                ARCamera.fieldOfView = (float)(fovx[0] * fovXScale);
            }
            else
            {
                ARCamera.fieldOfView = (float)(fovy[0] * fovYScale);
            }


            invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
            Debug.Log("invertYM " + invertYM.ToString());

            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
            Debug.Log("invertZM " + invertZM.ToString());


            axes.SetActive(false);
            head.SetActive(false);
            rightEye.SetActive(false);
            leftEye.SetActive(false);
            mouth.SetActive(false);

            mouthParticleSystem = mouth.GetComponentsInChildren <ParticleSystem>(true);
        }
Example #25
        //javadoc: CascadeClassifier::detectMultiScale3(image, objects, rejectLevels, levelWeights)
        public void detectMultiScale3(Mat image, MatOfRect objects, MatOfInt rejectLevels, MatOfDouble levelWeights)
        {
            ThrowIfDisposed();
            if (image != null)
            {
                image.ThrowIfDisposed();
            }
            if (objects != null)
            {
                objects.ThrowIfDisposed();
            }
            if (rejectLevels != null)
            {
                rejectLevels.ThrowIfDisposed();
            }
            if (levelWeights != null)
            {
                levelWeights.ThrowIfDisposed();
            }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
            Mat objects_mat      = objects;
            Mat rejectLevels_mat = rejectLevels;
            Mat levelWeights_mat = levelWeights;
            objdetect_CascadeClassifier_detectMultiScale3_16(nativeObj, image.nativeObj, objects_mat.nativeObj, rejectLevels_mat.nativeObj, levelWeights_mat.nativeObj);

            return;
#else
            return;
#endif
        }
        void InitializeCameraMatrix(Mat inputImageMat)
        {
            Debug.Log("******************************");

            float width  = inputImageMat.width();
            float height = inputImageMat.height();

            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }

            // Set camera param
            int    max_d = (int)Mathf.Max(width, height);
            double fx    = max_d;
            double fy    = max_d;
            double cx    = width / 2.0f;
            double cy    = height / 2.0f;

            _CamMatrix = new Mat(3, 3, CvType.CV_64FC1);
            _CamMatrix.put(0, 0, fx);
            _CamMatrix.put(0, 1, 0);
            _CamMatrix.put(0, 2, cx);
            _CamMatrix.put(1, 0, 0);
            _CamMatrix.put(1, 1, fy);
            _CamMatrix.put(1, 2, cy);
            _CamMatrix.put(2, 0, 0);
            _CamMatrix.put(2, 1, 0);
            _CamMatrix.put(2, 2, 1.0f);
            Debug.Log("CamMatrix " + _CamMatrix.dump());

            _DistCoeffs = new MatOfDouble(0, 0, 0, 0);
            Debug.Log("DistCoeffs " + _DistCoeffs.dump());

            // Compute calibration values from the camera matrix
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(_CamMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("ImageSize " + imageSize.ToString());
            Debug.Log("ApertureWidth " + apertureWidth);
            Debug.Log("ApertureHeight " + apertureHeight);
            Debug.Log("Fovx " + fovx [0]);
            Debug.Log("Fovy " + fovy [0]);
            Debug.Log("FocalLength " + focalLength [0]);
            Debug.Log("PrincipalPoint " + principalPoint.ToString());
            Debug.Log("Aspectratio " + aspectratio [0]);

            // Convert the FOV value from the OpenCV convention to the Unity convention.
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            Debug.Log("FovXScale " + fovXScale);
            Debug.Log("FovYScale " + fovYScale);

            // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
            if (widthScale < heightScale)
            {
                ARCamera.fieldOfView = (float)(fovx [0] * fovXScale);
            }
            else
            {
                ARCamera.fieldOfView = (float)(fovy [0] * fovYScale);
            }

            Debug.Log("******************************");
        }
Example #27
        // Update is called once per frame
        void Update()
        {
            // Loop playback
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);

            // Throws "PlayerLoop called recursively!" errors on iOS; WebCamTexture is recommended instead.
            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);

                //Debug.Log("Mat toString " + rgbMat.ToString());

                using (HOGDescriptor des = new HOGDescriptor())
                using (MatOfRect locations = new MatOfRect())
                using (MatOfDouble weights = new MatOfDouble())
                {
                    des.setSVMDetector(HOGDescriptor.getDefaultPeopleDetector());
                    des.detectMultiScale(rgbMat, locations, weights);

                    OpenCVForUnity.Rect[] rects = locations.toArray();
                    for (int i = 0; i < rects.Length; i++)
                    {
                        //Debug.Log("detected person " + rects[i]);
                        Imgproc.rectangle(rgbMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 0, 0), 2);
                    }
                    //Debug.Log(locations.ToString());
                    //Debug.Log(weights.ToString());
                }

                Utils.matToTexture2D(rgbMat, texture);

                gameObject.GetComponent<Renderer>().material.mainTexture = texture;
            }
        }
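
The weights Mat filled by detectMultiScale above holds a per-detection confidence score; a hypothetical post-filter (the 0.5 threshold is an assumption) could drop weak hits before drawing:

        // Hypothetical: keep only detections whose detector weight exceeds a threshold.
        double[] w = weights.toArray();
        for (int i = 0; i < rects.Length; i++)
        {
            if (w[i] > 0.5)
                Imgproc.rectangle(rgbMat, rects[i].tl(), rects[i].br(), new Scalar(0, 255, 0), 2);
        }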
Example #28
    private void CameraInit(int height, int width)
    {
        double fx;
        double fy;
        double cx;
        double cy;

        int max_d = (int)Mathf.Max(width, height);

        fx = 3.1145896e+02;
        fy = 3.0654921e+02;
        cx = 4.0354616e+02;
        cy = 4.0661785e+02;

        camMatrix = new Mat(3, 3, CvType.CV_64FC1);
        camMatrix.put(0, 0, fx);
        camMatrix.put(0, 1, 0);
        camMatrix.put(0, 2, cx);
        camMatrix.put(1, 0, 0);
        camMatrix.put(1, 1, fy);
        camMatrix.put(1, 2, cy);
        camMatrix.put(2, 0, 0);
        camMatrix.put(2, 1, 0);
        camMatrix.put(2, 2, 1.0f);

        distCoeffs = new MatOfDouble(-2.971244e-01, 8.356048e-02, -4.74639e-03, 8.1501643e-05, -9.992362e-03);
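        // Order of the coefficients above: (k1, k2, p1, p2, k3).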



        Debug.Log("Import the calibration data.");



        float imageSizeScale = 1.0f;
        float widthScale     = (float)Screen.width / width;
        float heightScale    = (float)Screen.height / height;
        //if (widthScale < heightScale) {
        //  Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        //  imageSizeScale = (float)Screen.height / (float)Screen.width;
        //} else {
        //  Camera.main.orthographicSize = height / 2;
        //}



        // calibration camera matrix values.
        Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
        double apertureWidth  = 0;
        double apertureHeight = 0;

        double[] fovx           = new double[1];
        double[] fovy           = new double[1];
        double[] focalLength    = new double[1];
        Point    principalPoint = new Point(0, 0);

        double[] aspectratio = new double[1];

        Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

        Debug.Log("imageSize " + imageSize.ToString());
        Debug.Log("apertureWidth " + apertureWidth);
        Debug.Log("apertureHeight " + apertureHeight);
        Debug.Log("fovx " + fovx[0]);
        Debug.Log("fovy " + fovy[0]);
        Debug.Log("focalLength " + focalLength[0]);
        Debug.Log("principalPoint " + principalPoint.ToString());
        Debug.Log("aspectratio " + aspectratio[0]);


        // Convert the FOV value from the OpenCV convention to the Unity convention.
        double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
        double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

        Debug.Log("fovXScale " + fovXScale);
        Debug.Log("fovYScale " + fovYScale);


        // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
        if (widthScale < heightScale)
        {
            arCamera.fieldOfView = (float)(fovx[0] * fovXScale);
        }
        else
        {
            arCamera.fieldOfView = (float)(fovy[0] * fovYScale);
        }
        // Display objects near the camera.
        arCamera.nearClipPlane = 0.01f;


        // Initialize all of the variables
        ids             = new Mat();
        list_ids        = new List <int>();
        corners         = new List <Mat>();
        rejectedCorners = new List <Mat>();
        rvecs           = new Mat();
        tvecs           = new Mat();
        rotMat          = new Mat(3, 3, CvType.CV_64FC1);


        detectorParams = DetectorParameters.create();
        dictionary     = Aruco.getPredefinedDictionary((int)dictionaryId);

        rvec          = new Mat();
        tvec          = new Mat();
        recoveredIdxs = new Mat();
    }
Example #29
        // Super fast color transfer between images. (Slightly faster than processing in Lab color format)
        private void TransferColor_YCrCb(Mat source, Mat target, Mat mask)
        {
            bool isFourChannelColor = false;

            if (source.channels() == 4)
            {
                if (sourceMat_c3 == null)
                {
                    sourceMat_c3 = new Mat();
                }
                if (targetMat_c3 == null)
                {
                    targetMat_c3 = new Mat();
                }

                isFourChannelColor = true;
                Imgproc.cvtColor(source, sourceMat_c3, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(target, targetMat_c3, Imgproc.COLOR_RGBA2RGB);
            }
            else
            {
                sourceMat_c3 = source;
                targetMat_c3 = target;
            }

            if (sourceMatYCrCb == null)
            {
                sourceMatYCrCb = new Mat();
            }
            if (targetMatYCrCb == null)
            {
                targetMatYCrCb = new Mat();
            }

            Imgproc.cvtColor(sourceMat_c3, sourceMatYCrCb, Imgproc.COLOR_RGB2YCrCb);
            Imgproc.cvtColor(targetMat_c3, targetMatYCrCb, Imgproc.COLOR_RGB2YCrCb);

            MatOfDouble labMeanSrc = new MatOfDouble();
            MatOfDouble labStdSrc  = new MatOfDouble();

            Core.meanStdDev(sourceMatYCrCb, labMeanSrc, labStdSrc, mask);

            MatOfDouble labMeanTar = new MatOfDouble();
            MatOfDouble labStdTar  = new MatOfDouble();

            Core.meanStdDev(targetMatYCrCb, labMeanTar, labStdTar, mask);

            targetMatYCrCb.convertTo(targetMatYCrCb, CvType.CV_32FC3);

            // subtract the means from the target image
            double[] labMeanTarArr = labMeanTar.toArray();
            Core.subtract(targetMatYCrCb, new Scalar(labMeanTarArr[0], labMeanTarArr[1], labMeanTarArr[2]), targetMatYCrCb);

            // scale by the standard deviations
            double[] labStdTarArr = labStdTar.toArray();
            double[] labStdSrcArr = labStdSrc.toArray();
            Scalar   scalar       = new Scalar(labStdTarArr[0] / labStdSrcArr[0], labStdTarArr[1] / labStdSrcArr[1], labStdTarArr[2] / labStdSrcArr[2]);

            Core.multiply(targetMatYCrCb, scalar, targetMatYCrCb);

            // add in the source mean
            double[] labMeanSrcArr = labMeanSrc.toArray();
            Core.add(targetMatYCrCb, new Scalar(labMeanSrcArr[0], labMeanSrcArr[1], labMeanSrcArr[2]), targetMatYCrCb);

            // clip the pixel intensities to [0, 255] if they fall outside this range.
            //Imgproc.threshold (targetMatYCrCb, targetMatYCrCb, 0, 0, Imgproc.THRESH_TOZERO);
            //Imgproc.threshold (targetMatYCrCb, targetMatYCrCb, 255, 255, Imgproc.THRESH_TRUNC);

            targetMatYCrCb.convertTo(targetMatYCrCb, CvType.CV_8UC3);
            Imgproc.cvtColor(targetMatYCrCb, targetMat_c3, Imgproc.COLOR_YCrCb2RGB);

            if (isFourChannelColor)
            {
                Imgproc.cvtColor(targetMat_c3, target, Imgproc.COLOR_RGB2RGBA);
            }
        }
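
As written, each YCrCb channel of target is remapped as t' = (t - mean_target) * (stddev_target / stddev_source) + mean_source. Note that Reinhard-style color transfer is often stated with the inverse ratio, stddev_source / stddev_target, so that the result adopts the source's spread; the intended direction of the scaling is worth verifying against the desired effect.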
Example #30
        // Use this for initialization
        void Run()
        {
            // If true, error logs from the native OpenCV side are displayed in the Unity Editor console.
            Utils.setDebugMode(true);

            Mat img = Imgcodecs.imread(image_filepath);

            #if !UNITY_WSA_10_0
            if (img.empty())
            {
                Debug.LogError("dnn/COCO_val2014_000000000589.jpg is not loaded.The image file can be downloaded here: \"https://github.com/CMU-Perceptual-Computing-Lab/openpose/blob/master/examples/media/COCO_val2014_000000000589.jpg\" folder. ");
                img = new Mat(368, 368, CvType.CV_8UC3, new Scalar(0, 0, 0));
            }
            #endif


            // Adjust Quad.transform.localScale.
            gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float imageWidth  = img.width();
            float imageHeight = img.height();

            float widthScale  = (float)Screen.width / imageWidth;
            float heightScale = (float)Screen.height / imageHeight;
            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = imageHeight / 2;
            }


            Net net = null;

            if (string.IsNullOrEmpty(caffemodel_filepath) || string.IsNullOrEmpty(prototxt_filepath))
            {
                Debug.LogError("model file is not loaded. The model and prototxt file can be downloaded here: \"http://posefs1.perception.cs.cmu.edu/OpenPose/models/pose/mpi/pose_iter_160000.caffemodel\",\"https://github.com/opencv/opencv_extra/blob/master/testdata/dnn/openpose_pose_mpi_faster_4_stages.prototxt\". Please copy to “Assets/StreamingAssets/dnn/” folder. ");
            }
            else
            {
                net = Dnn.readNetFromCaffe(prototxt_filepath, caffemodel_filepath);

                // Intel's Deep Learning Inference Engine backend is supported on 64-bit Windows only. Please refer to ReadMe.pdf for the setup procedure.
                //net.setPreferableBackend (Dnn.DNN_BACKEND_INFERENCE_ENGINE);
            }

            if (net == null)
            {
                Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
            else
            {
                float frameWidth  = img.cols();
                float frameHeight = img.rows();

                Mat input = Dnn.blobFromImage(img, 1.0 / 255, new Size(inWidth, inHeight), new Scalar(0, 0, 0), false, false);

                net.setInput(input);

//                TickMeter tm = new TickMeter ();
//                tm.start ();

                Mat output = net.forward();

//                tm.stop ();
//                Debug.Log ("Inference time, ms: " + tm.getTimeMilli ());


                output = output.reshape(1, 16);


                float[]      data   = new float[46 * 46];
                List <Point> points = new List <Point> ();
                for (int i = 0; i < BODY_PARTS.Count; i++)
                {
                    output.get(i, 0, data);

                    Mat heatMap = new Mat(1, data.Length, CvType.CV_32FC1);
                    heatMap.put(0, 0, data);


                    // Originally, we would try to find all the local maxima. To simplify the sample,
                    // we just find the global one; however, only a single pose at a time
                    // can be detected this way.
                    Core.MinMaxLocResult result = Core.minMaxLoc(heatMap);

                    heatMap.Dispose();
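
                    // The 46x46 heatmap was flattened into a single row above, so result.maxLoc.x
                    // is a linear index: (maxLoc.x % 46) gives the column and (maxLoc.x / 46) the row.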


                    double x = (frameWidth * (result.maxLoc.x % 46)) / 46;
                    double y = (frameHeight * (result.maxLoc.x / 46)) / 46;

                    if (result.maxVal > 0.1)
                    {
                        points.Add(new Point(x, y));
                    }
                    else
                    {
                        points.Add(null);
                    }
                }

                for (int i = 0; i < POSE_PAIRS.GetLength(0); i++)
                {
                    string partFrom = POSE_PAIRS [i, 0];
                    string partTo   = POSE_PAIRS [i, 1];

                    int idFrom = BODY_PARTS [partFrom];
                    int idTo   = BODY_PARTS [partTo];

                    if (points [idFrom] != null && points [idTo] != null)
                    {
                        Imgproc.line(img, points [idFrom], points [idTo], new Scalar(0, 255, 0), 3);
                        Imgproc.ellipse(img, points [idFrom], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
                        Imgproc.ellipse(img, points [idTo], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
                    }
                }



                MatOfDouble timings = new MatOfDouble();
                long        t       = net.getPerfProfile(timings);
                Debug.Log("t: " + t);
                Debug.Log("timings.dump(): " + timings.dump());

                double freq = Core.getTickFrequency() / 1000;
                Debug.Log("freq: " + freq);

                Imgproc.putText(img, (t / freq) + "ms", new Point(10, img.height() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.6, new Scalar(0, 0, 255), 2);
            }

            Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);


            Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(img, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;


            Utils.setDebugMode(false);
        }
Example #31
        private void Run()
        {
            // Set the 3D face object points.
            objectPoints68 = new MatOfPoint3f(
                new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
                new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
                new Point3(0.0, 50, 120), //nose (Nose top)
                new Point3(-26, 15, 83),  //l mouth (Mouth breadth)
                new Point3(26, 15, 83),   //r mouth (Mouth breadth)
                new Point3(-79, 90, 0.0), //l ear (Bitragion breadth)
                new Point3(79, 90, 0.0)   //r ear (Bitragion breadth)
                );
            objectPoints5 = new MatOfPoint3f(
                new Point3(-23, 90, 83), //l eye (Inner corner of the eye)
                new Point3(23, 90, 83),  //r eye (Inner corner of the eye)
                new Point3(-50, 90, 80), //l eye (Tail of the eye)
                new Point3(50, 90, 80),  //r eye (Tail of the eye)
                new Point3(0.0, 50, 120) //nose (Nose top)
                );
            imagePoints = new MatOfPoint2f();

            faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

            rgbMat = new Mat();

            capture = new VideoCapture();
            capture.open(dance_avi_filepath);

            if (capture.isOpened())
            {
                Debug.Log("capture.isOpened() true");
            }
            else
            {
                Debug.Log("capture.isOpened() false");
            }


            Debug.Log("CAP_PROP_FORMAT: " + capture.get(Videoio.CAP_PROP_FORMAT));
            Debug.Log("CV_CAP_PROP_PREVIEW_FORMAT: " + capture.get(Videoio.CV_CAP_PROP_PREVIEW_FORMAT));
            Debug.Log("CAP_PROP_POS_MSEC: " + capture.get(Videoio.CAP_PROP_POS_MSEC));
            Debug.Log("CAP_PROP_POS_FRAMES: " + capture.get(Videoio.CAP_PROP_POS_FRAMES));
            Debug.Log("CAP_PROP_POS_AVI_RATIO: " + capture.get(Videoio.CAP_PROP_POS_AVI_RATIO));
            Debug.Log("CAP_PROP_FRAME_COUNT: " + capture.get(Videoio.CAP_PROP_FRAME_COUNT));
            Debug.Log("CAP_PROP_FPS: " + capture.get(Videoio.CAP_PROP_FPS));
            Debug.Log("CAP_PROP_FRAME_WIDTH: " + capture.get(Videoio.CAP_PROP_FRAME_WIDTH));
            Debug.Log("CAP_PROP_FRAME_HEIGHT: " + capture.get(Videoio.CAP_PROP_FRAME_HEIGHT));

            capture.grab();
            capture.retrieve(rgbMat, 0);
            int frameWidth  = rgbMat.cols();
            int frameHeight = rgbMat.rows();

            texture = new Texture2D(frameWidth, frameHeight, TextureFormat.RGB24, false);
            gameObject.transform.localScale = new Vector3((float)frameWidth, (float)frameHeight, 1);
            capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("dlib shape predictor", dlibShapePredictorFileName);
                fpsMonitor.Add("width", frameWidth.ToString());
                fpsMonitor.Add("height", frameHeight.ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }


            float width  = (float)frameWidth;
            float height = (float)frameHeight;

            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }


            // Set camera parameters
            int    max_d = (int)Mathf.Max(width, height);
            double fx    = max_d;
            double fy    = max_d;
            double cx    = width / 2.0f;
            double cy    = height / 2.0f;

            camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());


            distCoeffs = new MatOfDouble(0, 0, 0, 0);
            Debug.Log("distCoeffs " + distCoeffs.dump());


            // Compute calibration values from the camera matrix
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);


            // Convert the FOV value from the OpenCV convention to the Unity convention.
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            Debug.Log("fovXScale " + fovXScale);
            Debug.Log("fovYScale " + fovYScale);


            //Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
            if (widthScale < heightScale)
            {
                ARCamera.fieldOfView = (float)(fovx [0] * fovXScale);
            }
            else
            {
                ARCamera.fieldOfView = (float)(fovy [0] * fovYScale);
            }


            invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
            Debug.Log("invertYM " + invertYM.ToString());

            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
            Debug.Log("invertZM " + invertZM.ToString());


            axes.SetActive(false);
            head.SetActive(false);
            rightEye.SetActive(false);
            leftEye.SetActive(false);
            mouth.SetActive(false);

            mouthParticleSystem = mouth.GetComponentsInChildren <ParticleSystem> (true);
        }
Example #32
        /// <summary>
        /// Find the Euler matrix from the output of SolvePnP.
        /// </summary>
        /// <param name="rotation">The rotation matrix returned by SolvePnp.</param>
        /// <returns>The Euler matrix containing pitch, roll, and yaw angles.</returns>
        public static MatOfDouble GetEulerMatrix(Mat rotation)
        {
            // convert the 1x3 rotation vector to a full 3x3 matrix
            var r = new MatOfDouble(3, 3);

            Cv2.Rodrigues(rotation, r);

            // set up some shortcuts to rotation matrix
            double m00 = r.At <double>(0, 0);
            double m01 = r.At <double>(0, 1);
            double m02 = r.At <double>(0, 2);
            double m10 = r.At <double>(1, 0);
            double m11 = r.At <double>(1, 1);
            double m12 = r.At <double>(1, 2);
            double m20 = r.At <double>(2, 0);
            double m21 = r.At <double>(2, 1);
            double m22 = r.At <double>(2, 2);

            // set up output variables
            Euler euler_out  = new Euler();
            Euler euler_out2 = new Euler();

            if (Math.Abs(m20) >= 1)
            {
                euler_out.yaw  = 0;
                euler_out2.yaw = 0;

                // From difference of angles formula
                if (m20 < 0)  //gimbal locked down
                {
                    double delta = Math.Atan2(m01, m02);
                    euler_out.pitch  = Math.PI / 2f;
                    euler_out2.pitch = Math.PI / 2f;
                    euler_out.roll   = delta;
                    euler_out2.roll  = delta;
                }
                else // gimbal locked up
                {
                    double delta = Math.Atan2(-m01, -m02);
                    euler_out.pitch  = -Math.PI / 2f;
                    euler_out2.pitch = -Math.PI / 2f;
                    euler_out.roll   = delta;
                    euler_out2.roll  = delta;
                }
            }
            else
            {
                euler_out.pitch  = -Math.Asin(m20);
                euler_out2.pitch = Math.PI - euler_out.pitch;

                euler_out.roll  = Math.Atan2(m21 / Math.Cos(euler_out.pitch), m22 / Math.Cos(euler_out.pitch));
                euler_out2.roll = Math.Atan2(m21 / Math.Cos(euler_out2.pitch), m22 / Math.Cos(euler_out2.pitch));

                euler_out.yaw  = Math.Atan2(m10 / Math.Cos(euler_out.pitch), m00 / Math.Cos(euler_out.pitch));
                euler_out2.yaw = Math.Atan2(m10 / Math.Cos(euler_out2.pitch), m00 / Math.Cos(euler_out2.pitch));
            }

            // return result
            return(new MatOfDouble(1, 3, new double[] { euler_out.yaw, euler_out.roll, euler_out.pitch }));
            // return new MatOfDouble(1, 3, new double[] { euler_out2.yaw, euler_out2.roll, euler_out2.pitch });
        }
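
A brief usage sketch; rotationVector stands for the rvec produced by Cv2.SolvePnP, as the summary above states:

        MatOfDouble euler = GetEulerMatrix(rotationVector);
        double yaw   = euler.At<double>(0, 0);
        double roll  = euler.At<double>(0, 1);
        double pitch = euler.At<double>(0, 2);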
Example #33
        void InitializeImageDetector(Mat inputImageMat, double fx, double fy, double cx, double cy, MatOfDouble distCoeffs)
        {
            InitializePatternDetector();
            InitializeMatrix();
            InitializeCameraMatrix(inputImageMat, fx, fy, cx, cy, distCoeffs);
        }
Example #34
        /// <summary>
        /// Raises the web cam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat grayMat = webCamTextureToMatHelper.GetMat();

            float rawFrameWidth  = grayMat.width();
            float rawFrameHeight = grayMat.height();

            if (enableDownScale)
            {
                downScaleMat    = imageOptimizationHelper.GetDownScaleMat(grayMat);
                DOWNSCALE_RATIO = imageOptimizationHelper.downscaleRatio;
            }
            else
            {
                downScaleMat    = grayMat;
                DOWNSCALE_RATIO = 1.0f;
            }

            float width  = downScaleMat.width();
            float height = downScaleMat.height();

            texture = new Texture2D((int)width, (int)height, TextureFormat.RGB24, false);
            previewQuad.GetComponent <MeshRenderer>().material.mainTexture = texture;
            previewQuad.transform.localScale = new Vector3(0.2f * width / height, 0.2f, 1);
            previewQuad.SetActive(displayCameraPreview);


            //Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);


            DebugUtils.AddDebugStr(webCamTextureToMatHelper.GetWidth() + " x " + webCamTextureToMatHelper.GetHeight() + " : " + webCamTextureToMatHelper.GetFPS());
            if (enableDownScale)
            {
                DebugUtils.AddDebugStr("enableDownScale = true: " + DOWNSCALE_RATIO + " / " + width + " x " + height);
            }


            // create camera matrix and dist coeffs.
            string loadDirectoryPath               = Path.Combine(Application.persistentDataPath, "HoloLensArUcoCameraCalibrationExample");
            string calibrationDirectoryName         = "camera_parameters" + rawFrameWidth + "x" + rawFrameHeight;
            string loadCalibrationFileDirectoryPath = Path.Combine(loadDirectoryPath, calibrationDirectoryName);
            string loadPath = Path.Combine(loadCalibrationFileDirectoryPath, calibrationDirectoryName + ".xml");

            if (useStoredCameraParameters && File.Exists(loadPath))
            {
                // If there is a camera parameters stored by HoloLensArUcoCameraCalibrationExample, use it

                CameraParameters param;
                XmlSerializer    serializer = new XmlSerializer(typeof(CameraParameters));
                using (var stream = new FileStream(loadPath, FileMode.Open))
                {
                    param = (CameraParameters)serializer.Deserialize(stream);
                }

                double fx = param.camera_matrix[0];
                double fy = param.camera_matrix[4];
                double cx = param.camera_matrix[2];
                double cy = param.camera_matrix[5];

                camMatrix  = CreateCameraMatrix(fx, fy, cx / DOWNSCALE_RATIO, cy / DOWNSCALE_RATIO);
                distCoeffs = new MatOfDouble(param.GetDistortionCoefficients());

                Debug.Log("Loaded CameraParameters from a stored XML file.");
                Debug.Log("loadPath: " + loadPath);

                DebugUtils.AddDebugStr("Loaded CameraParameters from a stored XML file.");
                DebugUtils.AddDebugStr("loadPath: " + loadPath);
            }
            else
            {
                if (useStoredCameraParameters && !File.Exists(loadPath))
                {
                    DebugUtils.AddDebugStr("The CameraParameters XML file (" + loadPath + ") does not exist.");
                }

#if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
                CameraIntrinsics cameraIntrinsics = webCamTextureToMatHelper.GetCameraIntrinsics();

                camMatrix  = CreateCameraMatrix(cameraIntrinsics.FocalLengthX, cameraIntrinsics.FocalLengthY, cameraIntrinsics.PrincipalPointX / DOWNSCALE_RATIO, cameraIntrinsics.PrincipalPointY / DOWNSCALE_RATIO);
                distCoeffs = new MatOfDouble(cameraIntrinsics.RadialDistK1, cameraIntrinsics.RadialDistK2, cameraIntrinsics.RadialDistK3, cameraIntrinsics.TangentialDistP1, cameraIntrinsics.TangentialDistP2);

                Debug.Log("Created CameraParameters from VideoMediaFrame.CameraIntrinsics on device.");

                DebugUtils.AddDebugStr("Created CameraParameters from VideoMediaFrame.CameraIntrinsics on device.");
#else
                // The camera matrix value of Hololens camera 896x504 size.
                // For details on the camera matrix, please refer to this page. (http://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html)
                // These values are unique to my device, obtained from the "Windows.Media.Devices.Core.CameraIntrinsics" class. (https://docs.microsoft.com/en-us/uwp/api/windows.media.devices.core.cameraintrinsics)
                // Can get these values by using this helper script. (https://github.com/EnoxSoftware/HoloLensWithOpenCVForUnityExample/tree/master/Assets/HololensCameraIntrinsicsChecker/CameraIntrinsicsCheckerHelper)
                double fx          = 1035.149;   //focal length x.
                double fy          = 1034.633;   //focal length y.
                double cx          = 404.9134;   //principal point x.
                double cy          = 236.2834;   //principal point y.
                double distCoeffs1 = 0.2036923;  //radial distortion coefficient k1.
                double distCoeffs2 = -0.2035773; //radial distortion coefficient k2.
                double distCoeffs3 = 0.0;        //tangential distortion coefficient p1.
                double distCoeffs4 = 0.0;        //tangential distortion coefficient p2.
                double distCoeffs5 = -0.2388065; //radial distortion coefficient k3.

                camMatrix  = CreateCameraMatrix(fx, fy, cx / DOWNSCALE_RATIO, cy / DOWNSCALE_RATIO);
                distCoeffs = new MatOfDouble(distCoeffs1, distCoeffs2, distCoeffs3, distCoeffs4, distCoeffs5);

                Debug.Log("Created a dummy CameraParameters (896x504).");

                DebugUtils.AddDebugStr("Created a dummy CameraParameters (896x504).");
#endif
            }

            Debug.Log("camMatrix " + camMatrix.dump());
            Debug.Log("distCoeffs " + distCoeffs.dump());

            //DebugUtils.AddDebugStr("camMatrix " + camMatrix.dump());
            //DebugUtils.AddDebugStr("distCoeffs " + distCoeffs.dump());


            // Compute calibration values from the camera matrix
            Size     imageSize      = new Size(width, height);
            double   apertureWidth  = 0;
            double   apertureHeight = 0;
            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);
            double[] aspectratio    = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx[0]);
            Debug.Log("fovy " + fovy[0]);
            Debug.Log("focalLength " + focalLength[0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio[0]);

            // Display objects near the camera.
            arCamera.nearClipPlane = 0.01f;

            ids             = new Mat();
            corners         = new List <Mat>();
            rejectedCorners = new List <Mat>();
            rvecs           = new Mat();
            tvecs           = new Mat();
            rotMat          = new Mat(3, 3, CvType.CV_64FC1);


            transformationM = new Matrix4x4();

            invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
            Debug.Log("invertYM " + invertYM.ToString());

            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
            Debug.Log("invertZM " + invertZM.ToString());

            detectorParams = DetectorParameters.create();
            dictionary     = Aruco.getPredefinedDictionary(Aruco.DICT_6X6_250);


            // If the web camera is front facing, flip the Mat.
            webCamTextureToMatHelper.flipHorizontal = webCamTextureToMatHelper.IsFrontFacing();

            rgbMat4preview = new Mat();
        }
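
The CreateCameraMatrix helper called above is not shown in this excerpt; a minimal sketch of its assumed shape (standard pinhole intrinsics):

        // Assumed helper: builds the 3x3 pinhole intrinsic matrix [fx 0 cx; 0 fy cy; 0 0 1].
        Mat CreateCameraMatrix(double fx, double fy, double cx, double cy)
        {
            Mat camMatrix = Mat.eye(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            return camMatrix;
        }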
Example #35
        void InitializeCameraMatrix(Mat inputImageMat, double fx, double fy, double cx, double cy, MatOfDouble distCoeffs)
        {
            Debug.Log("******************************");

            float width  = inputImageMat.width();
            float height = inputImageMat.height();

            // Set camera param
            _CamMatrix = new Mat(3, 3, CvType.CV_64FC1);
            _CamMatrix.put(0, 0, fx);
            _CamMatrix.put(0, 1, 0);
            _CamMatrix.put(0, 2, cx);
            _CamMatrix.put(1, 0, 0);
            _CamMatrix.put(1, 1, fy);
            _CamMatrix.put(1, 2, cy);
            _CamMatrix.put(2, 0, 0);
            _CamMatrix.put(2, 1, 0);
            _CamMatrix.put(2, 2, 1.0f);
            Debug.Log("CamMatrix " + _CamMatrix.dump());

            _DistCoeffs = distCoeffs;
            Debug.Log("DistCoeffs " + _DistCoeffs.dump());

            // Compute calibration values from the camera matrix
            Size   imageSize      = new Size(width, height);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(_CamMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("ImageSize " + imageSize.ToString());
            Debug.Log("ApertureWidth " + apertureWidth);
            Debug.Log("ApertureHeight " + apertureHeight);
            Debug.Log("Fovx " + fovx [0]);
            Debug.Log("Fovy " + fovy [0]);
            Debug.Log("FocalLength " + focalLength [0]);
            Debug.Log("PrincipalPoint " + principalPoint.ToString());
            Debug.Log("Aspectratio " + aspectratio [0]);

            Debug.Log("******************************");
        }
Пример #36
0
    void ProcessCalibration()
    {
        // Convert the Unity Texture2D to an OpenCV Mat.
        int imageWidth  = cameraTexture.width;
        int imageHeight = cameraTexture.height;

        UnityEngine.Rect wholeRect = new UnityEngine.Rect(0, 0, cameraTexture.width, cameraTexture.height);
        cameraTexture.ReadPixels(wholeRect, 0, 0, true);
        //cameraMat = new Mat(imageHeight, imageWidth, CvType.CV_8UC3);
        //cameraTexture = new Texture2D(imageWidth, imageHeight, TextureFormat.ARGB32, false);
        //image.CopyToTexture(cameraTexture);
        Utils.texture2DToMat(cameraTexture, cameraMat);


        Mat gray = new Mat(imageHeight, imageWidth, CvType.CV_8UC1);

        Imgproc.cvtColor(cameraMat, gray, Imgproc.COLOR_RGB2GRAY);

        Mat grayC3 = new Mat(imageHeight, imageWidth, CvType.CV_8UC3);

        Imgproc.cvtColor(gray, grayC3, Imgproc.COLOR_GRAY2RGB);

        int rectW = (int)(imageHeight * 0.4);
        int rectH = (int)(imageHeight * 0.3);
        var x     = (int)(imageWidth * 0.5 - (rectW / 2));
        var y     = (int)(imageHeight * 0.5 - (rectH / 2));
        var rect  = new OpenCVForUnity.Rect(x, y, rectW, rectH);

        var center    = new Point(imageWidth / 2.0, imageHeight / 2.0);
        var lineColor = new Scalar(255, 153, 153);

        var rotatedRect      = new RotatedRect(center, new Size(rectW, rectH), 0);
        var rotatedSmallRect = new RotatedRect(center, new Size((int)(rectW * 0.7), (int)(rectH * 0.7)), 0);

        Imgproc.ellipse(grayC3, rotatedRect, lineColor, 3);
        Imgproc.ellipse(grayC3, rotatedSmallRect, lineColor, 3);

        //outputScreenQuad.setMat(grayC3);


        if (startProcess)
        {
            Debug.Log("startProcess");
            var mask = Mat.zeros(imageHeight, imageWidth, CvType.CV_8UC1);
            Imgproc.ellipse(mask, rotatedRect, new Scalar(255), -1);

            var hsvChs   = ARUtil.getHSVChannels(cameraMat);
            var yCrCbChs = ARUtil.getYCrCbChannels(cameraMat);
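            // Sample skin-color statistics inside the ellipse mask: for the S and V
            // channels (HSV) and the Cr channel (YCrCb), the thresholds are set to
            // mean ± 2·stddev, covering roughly 95% of samples under a normal assumption.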

            foreach (var chStr in new List<string> { "s", "v", "cr" })
            {
                MatOfDouble meanMat   = new MatOfDouble();
                MatOfDouble stddevMat = new MatOfDouble();
                Mat         chMat     = new Mat();
                if (chStr == "s")
                {
                    chMat = hsvChs[1];
                }
                else if (chStr == "v")
                {
                    chMat = hsvChs[2];
                }
                else
                {
                    chMat = yCrCbChs[1];
                }
                Core.meanStdDev(chMat, meanMat, stddevMat, mask);
                var mean   = meanMat.toList()[0];
                var stddev = stddevMat.toList()[0];

                if (chStr == "s")
                {
                    s_threshold_lower = mean - stddev * 2;
                    s_threshold_upper = mean + stddev * 2;
                }
                else if (chStr == "v")
                {
                    v_threshold_lower = mean - stddev * 2;
                    v_threshold_upper = mean + stddev * 2;
                }
                else
                {
                    cr_threshold_lower = mean - stddev * 2;
                    cr_threshold_upper = mean + stddev * 2;
                }
            }


            doneSetThreshlod = true;
        }
        else
        {
            outputScreenQuad.setMat(grayC3);
        }
    }
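
Once the thresholds are set, they would typically be applied per frame with Core.inRange to build a skin mask. A minimal sketch under that assumption (BuildSkinMask and the mask names are illustrative, not part of the example above):

    // Sketch: combine the per-channel threshold ranges into a single skin mask.
    Mat BuildSkinMask(Mat frame)
    {
        // Assumes the same ARUtil helpers used in ProcessCalibration above.
        var hsvChs   = ARUtil.getHSVChannels(frame);
        var yCrCbChs = ARUtil.getYCrCbChannels(frame);

        Mat sMask  = new Mat();
        Mat vMask  = new Mat();
        Mat crMask = new Mat();
        Core.inRange(hsvChs[1], new Scalar(s_threshold_lower), new Scalar(s_threshold_upper), sMask);
        Core.inRange(hsvChs[2], new Scalar(v_threshold_lower), new Scalar(v_threshold_upper), vMask);
        Core.inRange(yCrCbChs[1], new Scalar(cr_threshold_lower), new Scalar(cr_threshold_upper), crMask);

        // A pixel counts as skin only if it passes all three channel tests.
        Mat skinMask = new Mat();
        Core.bitwise_and(sMask, vMask, skinMask);
        Core.bitwise_and(skinMask, crMask, skinMask);
        return skinMask;
    }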
        /// <summary>
        /// Raises the web cam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = Mathf.Round(webCamTextureMat.width() / downscaleRatio);
            float height = Mathf.Round(webCamTextureMat.height() / downscaleRatio);

            texture = new Texture2D((int)width, (int)height, TextureFormat.RGB24, false);
            previewQuad.GetComponent <MeshRenderer>().material.mainTexture = texture;
            previewQuad.transform.localScale = new Vector3(1, height / width, 1);
            previewQuad.SetActive(displayCameraPreview);

            double fx = this.fx;
            double fy = this.fy;
            double cx = this.cx / downscaleRatio;
            double cy = this.cy / downscaleRatio;

            camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());

            distCoeffs = new MatOfDouble(distCoeffs1, distCoeffs2, distCoeffs3, distCoeffs4, distCoeffs5);
            Debug.Log("distCoeffs " + distCoeffs.dump());

            //Calibration camera
            Size   imageSize      = new Size(width, height);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);


            grayMat  = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC3);
            ids      = new Mat();
            corners  = new List <Mat> ();
            rejected = new List <Mat> ();
            rvecs    = new Mat();
            tvecs    = new Mat();
            rotMat   = new Mat(3, 3, CvType.CV_64FC1);


            transformationM = new Matrix4x4();

            invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
            Debug.Log("invertYM " + invertYM.ToString());

            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
            Debug.Log("invertZM " + invertZM.ToString());

            detectorParams = DetectorParameters.create();
            dictionary     = Aruco.getPredefinedDictionary(Aruco.DICT_6X6_250);


            //If the WebCamera is front-facing, flip the Mat.
            if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing)
            {
                webCamTextureToMatHelper.flipHorizontal = true;
            }

            rgbaMat4Thread   = new Mat();
            downScaleRgbaMat = new Mat();
            rgbMat4preview   = new Mat();
        }
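
For reference, the transformationM, invertYM and invertZM fields prepared above are usually combined per detected marker as follows. This is a sketch of the common OpenCVForUnity ArUco pattern; it assumes rvecs/tvecs were filled by Aruco.estimatePoseSingleMarkers and that i is the marker index:

            // Sketch: convert an OpenCV marker pose (rvec, tvec) into a Unity matrix.
            double[] tvecArr = tvecs.get(i, 0);
            double[] rvecArr = rvecs.get(i, 0);
            Mat rvec = new Mat(3, 1, CvType.CV_64FC1);
            rvec.put(0, 0, rvecArr);

            // Rotation vector -> 3x3 rotation matrix.
            Calib3d.Rodrigues(rvec, rotMat);

            transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)tvecArr[0]));
            transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)tvecArr[1]));
            transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)tvecArr[2]));
            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

            // Right-handed coordinates (OpenCV) to left-handed (Unity): flip Y, then flip Z.
            Matrix4x4 ARM = invertYM * transformationM * invertZM;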
Пример #38
0
        /// <summary>
        /// Computes RQ decomposition of 3x3 matrix
        /// </summary>
        /// <param name="src">3x3 input matrix.</param>
        /// <param name="mtxR">Output 3x3 upper-triangular matrix.</param>
        /// <param name="mtxQ"> Output 3x3 orthogonal matrix.</param>
        /// <param name="qx">Optional output 3x3 rotation matrix around x-axis.</param>
        /// <param name="qy">Optional output 3x3 rotation matrix around y-axis.</param>
        /// <param name="qz">Optional output 3x3 rotation matrix around z-axis.</param>
        /// <returns>Three Euler rotation angles in degrees.</returns>
        public static Vec3d RQDecomp3x3(double[,] src, out double[,] mtxR, out double[,] mtxQ,
            out double[,] qx, out double[,] qy, out double[,] qz)
        {
            if (src == null)
                throw new ArgumentNullException("src");
            if (src.GetLength(0) != 3 || src.GetLength(1) != 3)
                throw new ArgumentException("src must be double[3,3]");

            using (var srcM = new Mat(3, 3, MatType.CV_64FC1, src))
            using (var mtxRM = new MatOfDouble())
            using (var mtxQM = new MatOfDouble())
            using (var qxM = new MatOfDouble())
            using (var qyM = new MatOfDouble())
            using (var qzM = new MatOfDouble())
            {
                Vec3d ret;
                NativeMethods.calib3d_RQDecomp3x3_Mat(srcM.CvPtr, 
                    mtxRM.CvPtr, mtxQM.CvPtr, qxM.CvPtr, qyM.CvPtr, qzM.CvPtr, 
                    out ret);
                mtxR = mtxRM.ToRectangularArray();
                mtxQ = mtxQM.ToRectangularArray();
                qx = qxM.ToRectangularArray();
                qy = qyM.ToRectangularArray();
                qz = qzM.ToRectangularArray();
                return ret;
            }
        }
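
A minimal usage sketch for the wrapper above, assuming it is exposed on the usual Cv2 facade (the input is a plain 30° rotation about the z-axis, so the R factor should be near-identity):

        double a = Math.PI / 6;
        double[,] rot =
        {
            { Math.Cos(a), -Math.Sin(a), 0 },
            { Math.Sin(a),  Math.Cos(a), 0 },
            { 0,            0,           1 },
        };

        double[,] mtxR, mtxQ, qx, qy, qz;
        Vec3d eulerDeg = Cv2.RQDecomp3x3(rot, out mtxR, out mtxQ, out qx, out qy, out qz);
        // For a pure rotation, mtxQ reproduces the input and the returned
        // vector holds the Euler angles in degrees.
        Console.WriteLine("Euler angles (deg): {0}, {1}, {2}", eulerDeg.Item0, eulerDeg.Item1, eulerDeg.Item2);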
        /// <summary>
        /// Raises the webcam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGB24, false);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

            gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("width", webCamTextureMat.width().ToString());
                fpsMonitor.Add("height", webCamTextureMat.height().ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }


            float width  = webCamTextureMat.width();
            float height = webCamTextureMat.height();

            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }


            // set camera parameters.
            double fx;
            double fy;
            double cx;
            double cy;

            string loadDirectoryPath               = Path.Combine(Application.persistentDataPath, "ArUcoCameraCalibrationExample");
            string calibratonDirectoryName         = "camera_parameters" + width + "x" + height;
            string loadCalibratonFileDirectoryPath = Path.Combine(loadDirectoryPath, calibratonDirectoryName);
            string loadPath = Path.Combine(loadCalibratonFileDirectoryPath, calibratonDirectoryName + ".xml");

            if (useStoredCameraParameters && File.Exists(loadPath))
            {
                CameraParameters param;
                XmlSerializer    serializer = new XmlSerializer(typeof(CameraParameters));
                using (var stream = new FileStream(loadPath, FileMode.Open)) {
                    param = (CameraParameters)serializer.Deserialize(stream);
                }

                camMatrix  = param.GetCameraMatrix();
                distCoeffs = new MatOfDouble(param.GetDistortionCoefficients());

                fx = param.camera_matrix [0];
                fy = param.camera_matrix [4];
                cx = param.camera_matrix [2];
                cy = param.camera_matrix [5];

                Debug.Log("Loaded CameraParameters from a stored XML file.");
                Debug.Log("loadPath: " + loadPath);
            }
            else
            {
                int max_d = (int)Mathf.Max(width, height);
                fx = max_d;
                fy = max_d;
                cx = width / 2.0f;
                cy = height / 2.0f;

                camMatrix = new Mat(3, 3, CvType.CV_64FC1);
                camMatrix.put(0, 0, fx);
                camMatrix.put(0, 1, 0);
                camMatrix.put(0, 2, cx);
                camMatrix.put(1, 0, 0);
                camMatrix.put(1, 1, fy);
                camMatrix.put(1, 2, cy);
                camMatrix.put(2, 0, 0);
                camMatrix.put(2, 1, 0);
                camMatrix.put(2, 2, 1.0f);

                distCoeffs = new MatOfDouble(0, 0, 0, 0);

                Debug.Log("Created a dummy CameraParameters.");
            }

            Debug.Log("camMatrix " + camMatrix.dump());
            Debug.Log("distCoeffs " + distCoeffs.dump());


            // calibration camera matrix values.
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);


            // Compensate for the difference between the OpenCV and Unity FOV conventions:
            // the ratio of the symmetric FOV implied by the focal length to the
            // asymmetric FOV split around the principal point.
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            Debug.Log("fovXScale " + fovXScale);
            Debug.Log("fovYScale " + fovYScale);


            // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
            if (widthScale < heightScale)
            {
                arCamera.fieldOfView = (float)(fovx [0] * fovXScale);
            }
            else
            {
                arCamera.fieldOfView = (float)(fovy [0] * fovYScale);
            }
            // Display objects near the camera.
            arCamera.nearClipPlane = 0.01f;


            rgbMat          = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC3);
            ids             = new Mat();
            corners         = new List <Mat> ();
            rejectedCorners = new List <Mat> ();
            rvecs           = new Mat();
            tvecs           = new Mat();
            rotMat          = new Mat(3, 3, CvType.CV_64FC1);


            detectorParams = DetectorParameters.create();
            dictionary     = Aruco.getPredefinedDictionary((int)dictionaryId);

            rvec          = new Mat();
            tvec          = new Mat();
            recoveredIdxs = new Mat();

            gridBoard = GridBoard.create(gridBoradMarkersX, gridBoradMarkersY, gridBoradMarkerLength, gridBoradMarkerSeparation, dictionary, gridBoradMarkerFirstMarker);

            charucoCorners = new Mat();
            charucoIds     = new Mat();
            charucoBoard   = CharucoBoard.create(chArUcoBoradSquaresX, chArUcoBoradSquaresY, chArUcoBoradSquareLength, chArUcoBoradMarkerLength, dictionary);

            diamondCorners = new List <Mat> ();
            diamondIds     = new Mat(1, 1, CvType.CV_32SC4);
            diamondIds.put(0, 0, new int[] { diamondId1, diamondId2, diamondId3, diamondId4 });


            // If the WebCamera is front-facing, flip the Mat.
            if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing)
            {
                webCamTextureToMatHelper.flipHorizontal = true;
            }
        }
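
The detector state initialized above is normally consumed once per frame, roughly as follows. This is a sketch of the standard Aruco flow; markerLength (the marker side length in meters) is assumed to be a field of the class:

            // Sketch: detect markers and estimate one pose per detection.
            Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejectedCorners);

            if (ids.total() > 0)
            {
                Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
            }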
Пример #40
0
        /// <summary>
        /// Raises the web cam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat webCamTextureMat = imageOptimizationHelper.GetDownScaleMat(webCamTextureToMatHelper.GetMat());

            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = webCamTextureMat.width();
            float height = webCamTextureMat.height();

            #if NETFX_CORE && !DISABLE_HOLOLENSCAMSTREAM_API
            // HololensCameraStream always returns image data in BGRA format.
            texture = new Texture2D((int)width, (int)height, TextureFormat.BGRA32, false);
            #else
            texture = new Texture2D((int)width, (int)height, TextureFormat.RGBA32, false);
            #endif

            previewQuad.GetComponent <MeshRenderer>().material.mainTexture = texture;
            previewQuad.transform.localScale = new Vector3(1, height / width, 1);
            previewQuad.SetActive(displayCameraPreview);


            double fx = this.fx;
            double fy = this.fy;
            double cx = this.cx / imageOptimizationHelper.downscaleRatio;
            double cy = this.cy / imageOptimizationHelper.downscaleRatio;

            camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());

            distCoeffs = new MatOfDouble(distCoeffs1, distCoeffs2, distCoeffs3, distCoeffs4, distCoeffs5);
            Debug.Log("distCoeffs " + distCoeffs.dump());

            //Calibration camera
            Size     imageSize      = new Size(width, height);
            double   apertureWidth  = 0;
            double   apertureHeight = 0;
            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);
            double[] aspectratio    = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);


            transformationM = new Matrix4x4();

            invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
            Debug.Log("invertYM " + invertYM.ToString());

            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
            Debug.Log("invertZM " + invertZM.ToString());


            axes.SetActive(false);
            head.SetActive(false);
            rightEye.SetActive(false);
            leftEye.SetActive(false);
            mouth.SetActive(false);

            mouthParticleSystem = mouth.GetComponentsInChildren <ParticleSystem> (true);


            //If the WebCamera is front-facing, flip the Mat.
            if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing)
            {
                webCamTextureToMatHelper.flipHorizontal = true;
            }

            grayMat = new Mat();
            cascade = new CascadeClassifier();
            cascade.load(OpenCVForUnity.Utils.getFilePath("lbpcascade_frontalface.xml"));

            // "empty" method is not working on the UWP platform.
            //            if (cascade.empty ()) {
            //                Debug.LogError ("cascade file is not loaded.Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            //            }

            grayMat4Thread = new Mat();
            cascade4Thread = new CascadeClassifier();
            cascade4Thread.load(OpenCVForUnity.Utils.getFilePath("haarcascade_frontalface_alt.xml"));

            // "empty" method is not working on the UWP platform.
            //            if (cascade4Thread.empty ()) {
            //                Debug.LogError ("cascade file is not loaded.Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            //            }

            detectionResult = new MatOfRect();
        }
Пример #41
0
        void Awake()
        {
            Application.targetFrameRate = 30;

            // Check resources.
            if (_operationMode == OperationMode.ManualSamlping && !_manualSampleButton)
            {
                Debug.LogError(logPrepend + "Missing sample button. You must provide a sample button when OperationMode is " + OperationMode.ManualSamlping);
                enabled = false;
                return;
            }

            // Load files.
            if (!Intrinsics.TryLoadFromFile(_cameraIntrinsicsFileName, out _cameraIntrinsics))
            {
                enabled = false;
                return;
            }

            // Find shaders.
            Shader unlitColorShader         = Shader.Find("Unlit/Color");
            Shader unlitTextureShader       = Shader.Find("Unlit/Texture");
            Shader unlitTintedTextureShader = Shader.Find("Unlit/TintedInvertibleTexture");

            _sb = new StringBuilder();

            // Operation mode dependent things.
            _stableSampleCountThreshold = _operationMode == OperationMode.ManualSamlping ? stableFrameCountThresholdForManualSampling : stableFrameCountThresholdForTimedSampling;
            if (_manualSampleButton)
            {
                _manualSampleButton.gameObject.SetActive(_operationMode == OperationMode.ManualSamlping);
                _manualSampleButton.interactable = false;
            }

            // Prepare OpenCV.
            _cameraExtrinsicsCalibrator    = new CameraExtrinsicsCalibrator();
            _projectorExtrinsicsCalibrator = new ProjectorFromCameraExtrinsicsCalibrator();
            _noDistCoeffs = new MatOfDouble(new double[] { 0, 0, 0, 0 });
            _circlePointsProjectorRenderImageMat = new MatOfPoint2f();
            _circlePointsRealModelMat            = new MatOfPoint3f();
            _circlePointsDetectedWorldMat        = new MatOfPoint3f();
            _undistortMap1 = new Mat();
            _undistortMap2 = new Mat();

            // Create patterns.
            TrackingToolsHelper.RenderPattern(_chessPatternSize, TrackingToolsHelper.PatternType.Chessboard, 1024, ref _chessPatternTexture, ref _patternRenderMaterial);

            // Layers.
            int uiLayer             = LayerMask.NameToLayer("UI");
            int mainCameraLayerMask = LayerMask.GetMask("Default");
            int projectorLayer      = LayerMask.NameToLayer("TransparentFX");
            int projectorLayerMask  = LayerMask.GetMask("TransparentFX");

            // Objects.
            _calibrationBoardTransform = new GameObject("CalibrationBoard").transform;

            // Create and prepare UI.
            _cameraAspectFitter = _processedCameraImage.GetComponent <AspectRatioFitter>();
            if (!_cameraAspectFitter)
            {
                _cameraAspectFitter = _processedCameraImage.gameObject.AddComponent <AspectRatioFitter>();
            }
            _cameraAspectFitter.aspectMode = AspectRatioFitter.AspectMode.FitInParent;
            _previewMaterial = new Material(Shader.Find(TrackingToolsConstants.previewShaderName));
            _processedCameraImage.gameObject.layer = uiLayer;
            _processedCameraImage.material         = _previewMaterial;
            _processedCameraImage.color            = Color.white;

            _arImage = new GameObject("ARImage").AddComponent <RawImage>();
            _arImage.transform.SetParent(_processedCameraImage.transform);
            _arImage.transform.SetAsFirstSibling();
            _arImage.raycastTarget = false;
            _arImage.rectTransform.FitParent();
            _arImage.gameObject.layer = uiLayer;

            _mainCamera.transform.SetPositionAndRotation(Vector3.zero, Quaternion.identity);
            _mainCamera.cullingMask = mainCameraLayerMask;

            _projectorCamera.transform.SetPositionAndRotation(Vector3.zero, Quaternion.identity);
            _projectorCamera.cullingMask           = projectorLayerMask;
            _projectorCamera.usePhysicalProperties = false;

            _chessPatternTransform = GameObject.CreatePrimitive(PrimitiveType.Quad).transform;
            _chessPatternTransform.SetParent(_calibrationBoardTransform);
            _chessPatternTransform.name = "Chessboard";
            Material chessboardMaterial = new Material(unlitTextureShader);

            chessboardMaterial.mainTexture = _chessPatternTexture;
            _chessPatternTransform.GetComponent <Renderer>().material = chessboardMaterial;
            float chessTileSizeMeters = _chessTileSize * 0.001f;

            _chessPatternTransform.localScale = new Vector3((_chessPatternSize.x - 1) * chessTileSizeMeters, (_chessPatternSize.y - 1) * chessTileSizeMeters, 0);
            TrackingToolsHelper.UpdateWorldSpacePatternPoints(_chessPatternSize, _chessPatternTransform.localToWorldMatrix, TrackingToolsHelper.PatternType.Chessboard, Vector2.zero, ref _chessCornersWorldMat);

            _circlePatternTransform      = GameObject.CreatePrimitive(PrimitiveType.Quad).transform;
            _circlePatternTransform.name = "Circlesboard";
            _circlePatternBoardMaterial  = new Material(unlitTintedTextureShader);
            _circlePatternTransform.GetComponent <Renderer>().material = _circlePatternBoardMaterial;
            _circlePatternTransform.position         = Vector3.forward;
            _circlePatternTransform.gameObject.layer = projectorLayer;

            _precisionDotsContainerObject = TrackingToolsHelper.CreatePrecisionTestDots(_calibrationBoardTransform, projectorLayer, _chessPatternSize, chessTileSizeMeters);
            _precisionDotsContainerObject.SetActive(false);

            _projectorSampleMeterTransform = GameObject.CreatePrimitive(PrimitiveType.Quad).transform;
            _projectorSampleMeterTransform.gameObject.layer = projectorLayer;
            _projectorSampleMeterTransform.name             = "ProjectorSampleMeter";
            _projectorSampleMeterTransform.localScale       = new Vector3(_chessPatternTransform.localScale.x, TrackingToolsConstants.precisionTestDotSize, 0);
            _projectorSampleMeterTransform.SetParent(_calibrationBoardTransform);
            float dotOffsetY = ((_chessPatternSize.y - 4) * 0.5f + 1) * chessTileSizeMeters;

            _projectorSampleMeterTransform.localPosition = new Vector3(0, -dotOffsetY - chessTileSizeMeters);
            Material sampleMeterMaterial = new Material(unlitColorShader);

            _projectorSampleMeterTransform.GetComponent <Renderer>().sharedMaterial = sampleMeterMaterial;
            _projectorSampleMeterTransform.gameObject.SetActive(false);

            _intrinsicsErrorText.gameObject.SetActive(false);
            _extrinsicsErrorText.gameObject.SetActive(false);
            _saveButton.gameObject.SetActive(false);
            _undoSampleButton.gameObject.SetActive(false);

            _screenBorderMaterial = new Material(unlitColorShader);

            _circlePatternToWorldPrevFrame = Matrix4x4.identity;

            _previewFlasher = new MaterialPropFlasher(_previewMaterial, "_Whiteout", TrackingToolsConstants.flashDuration);
            UpdateSampleCounterUI();

            // Subscribe.
            _circlePatternScaleSlider.onValueChanged.AddListener((float v) => _manualCirclePatternTransformationRequested = true);
            _circlePatternOffsetXSlider.onValueChanged.AddListener((float v) => _manualCirclePatternTransformationRequested = true);
            _circlePatternOffsetYSlider.onValueChanged.AddListener((float v) => _manualCirclePatternTransformationRequested = true);
            _saveButton.onClick.AddListener(SaveToFiles);
            _undoSampleButton.onClick.AddListener(UndoSample);
            if (_manualSampleButton)
            {
                _manualSampleButton.onClick.AddListener(() => _sampleManuallyRequested = true);
            }
        }
Пример #42
0
        /// <summary>
        /// projects points from the model coordinate space to the image coordinates. 
        /// Also computes derivatives of the image coordinates w.r.t the intrinsic 
        /// and extrinsic camera parameters
        /// </summary>
        /// <param name="objectPoints">Array of object points, 3xN/Nx3 1-channel or 
        /// 1xN/Nx1 3-channel, where N is the number of points in the view.</param>
        /// <param name="rvec">Rotation vector (3x1).</param>
        /// <param name="tvec">Translation vector (3x1).</param>
        /// <param name="cameraMatrix">Camera matrix (3x3)</param>
        /// <param name="distCoeffs">Input vector of distortion coefficients 
        /// (k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6]]) of 4, 5, or 8 elements. 
        /// If the vector is null, the zero distortion coefficients are assumed.</param>
        /// <param name="imagePoints">Output array of image points, 2xN/Nx2 1-channel 
        /// or 1xN/Nx1 2-channel</param>
        /// <param name="jacobian">Optional output 2Nx(10 + numDistCoeffs) jacobian matrix 
        /// of derivatives of image points with respect to components of the rotation vector, 
        /// translation vector, focal lengths, coordinates of the principal point and 
        /// the distortion coefficients. In the old interface different components of 
        /// the jacobian are returned via different output parameters.</param>
        /// <param name="aspectRatio">Optional “fixed aspect ratio” parameter. 
        /// If the parameter is not 0, the function assumes that the aspect ratio (fx/fy) 
        /// is fixed and correspondingly adjusts the jacobian matrix.</param>
        public static void ProjectPoints(IEnumerable<Point3d> objectPoints,
                                         double[] rvec, double[] tvec,
                                         double[,] cameraMatrix, double[] distCoeffs,
                                         out Point2d[] imagePoints,
                                         out double[,] jacobian,
                                         double aspectRatio = 0)
        {
            if (objectPoints == null)
                throw new ArgumentNullException("objectPoints");
            if (rvec == null)
                throw new ArgumentNullException("rvec");
            if (rvec.Length != 3)
                throw new ArgumentException("rvec.Length != 3");
            if (tvec == null)
                throw new ArgumentNullException("tvec");
            if (tvec.Length != 3)
                throw new ArgumentException("tvec.Length != 3");
            if (cameraMatrix == null)
                throw new ArgumentNullException("cameraMatrix");
            if (cameraMatrix.GetLength(0) != 3 || cameraMatrix.GetLength(1) != 3)
                throw new ArgumentException("cameraMatrix must be double[3,3]");

            Point3d[] objectPointsArray = EnumerableEx.ToArray(objectPoints);
            using (var objectPointsM = new Mat(objectPointsArray.Length, 1, MatType.CV_64FC3, objectPointsArray))
            using (var rvecM = new Mat(3, 1, MatType.CV_64FC1, rvec))
            using (var tvecM = new Mat(3, 1, MatType.CV_64FC1, tvec))
            using (var cameraMatrixM = new Mat(3, 3, MatType.CV_64FC1, cameraMatrix))
            using (var imagePointsM = new MatOfPoint2d())
            {
                // Dispose the temporary native Mats deterministically.
                using (var distCoeffsM = distCoeffs == null
                    ? new Mat()
                    : new Mat(distCoeffs.Length, 1, MatType.CV_64FC1, distCoeffs))
                using (var jacobianM = new MatOfDouble())
                {
                    NativeMethods.calib3d_projectPoints_Mat(objectPointsM.CvPtr,
                        rvecM.CvPtr, tvecM.CvPtr, cameraMatrixM.CvPtr, distCoeffsM.CvPtr,
                        imagePointsM.CvPtr, jacobianM.CvPtr, aspectRatio);

                    imagePoints = imagePointsM.ToArray();
                    jacobian = jacobianM.ToRectangularArray();
                }
            }
        }
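
A short usage sketch for the wrapper above, again assuming the usual Cv2 facade: four coplanar points at z = 1 are projected with an identity pose, no distortion (null), and an illustrative 500px-focal-length intrinsic matrix:

        var objectPoints = new[]
        {
            new Point3d(0, 0, 1), new Point3d(1, 0, 1),
            new Point3d(1, 1, 1), new Point3d(0, 1, 1),
        };
        double[] rvec = { 0, 0, 0 };
        double[] tvec = { 0, 0, 0 };
        double[,] cameraMatrix =
        {
            { 500, 0, 320 },
            { 0, 500, 240 },
            { 0,   0,   1 },
        };

        Point2d[] imagePoints;
        double[,] jacobian;
        Cv2.ProjectPoints(objectPoints, rvec, tvec, cameraMatrix, null, out imagePoints, out jacobian);
        // Each projected point is (fx * X/Z + cx, fy * Y/Z + cy) under the pinhole model.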
Пример #43
0
        void Awake()
        {
            // Create UI.
            if (!_containerUI)
            {
                _containerUI = new GameObject("CameraPoser").AddComponent <RectTransform>();
                _containerUI.transform.SetParent(_canvas.transform);
            }
            CanvasGroup wrapperGroup = _containerUI.GetComponent <CanvasGroup>();

            if (!wrapperGroup)
            {
                wrapperGroup = _containerUI.gameObject.AddComponent <CanvasGroup>();
            }
            wrapperGroup.alpha = _alpha;
            Image backgroundImage = new GameObject("Background").AddComponent <Image>();

            backgroundImage.transform.SetParent(_containerUI.transform);
            _rawImageUI = new GameObject("CameraImage").AddComponent <RawImage>();
            _rawImageUI.transform.SetParent(_containerUI.transform);
            _rawImageUI.uvRect         = _flipTexture ? new UnityEngine.Rect(0, 1, 1, -1) : new UnityEngine.Rect(0, 0, 1, 1);
            _rawImageRect              = _rawImageUI.GetComponent <RectTransform>();
            _uiMaterial                = new Material(Shader.Find("Hidden/SingleChannelTexture"));
            _rawImageUI.material       = _uiMaterial;
            _aspectFitterUI            = _rawImageUI.gameObject.AddComponent <AspectRatioFitter>();
            _aspectFitterUI.aspectMode = AspectRatioFitter.AspectMode.HeightControlsWidth;
            backgroundImage.color      = Color.black;
            ExpandRectTransform(_containerUI);
            ExpandRectTransform(backgroundImage.GetComponent <RectTransform>());
            ExpandRectTransform(_rawImageRect);
            _userPointRects  = new RectTransform[pointCount];
            _userPointImages = new Image[pointCount];
            for (int p = 0; p < pointCount; p++)
            {
                GameObject pointObject = new GameObject("Point" + p);
                pointObject.transform.SetParent(_rawImageRect);
                Image pointImage = pointObject.AddComponent <Image>();
                pointImage.color = Color.cyan;
                RectTransform pointRect = pointObject.GetComponent <RectTransform>();
                pointRect.sizeDelta = Vector2.one * 5;
                SetAnchoredPosition(pointRect, defaultPointPositions[p]);
                pointRect.anchoredPosition = Vector3.zero;
                Text pointLabel = new GameObject("Label").AddComponent <Text>();
                pointLabel.text = p.ToString();
                pointLabel.transform.SetParent(pointRect);
                pointLabel.rectTransform.anchoredPosition = Vector2.zero;
                pointLabel.rectTransform.sizeDelta        = new Vector2(_fontSize, _fontSize) * 2;
                pointLabel.font     = _font;
                pointLabel.fontSize = _fontSize;
                _userPointRects[p]  = pointRect;
                _userPointImages[p] = pointImage;
            }

            // Hide.
            //if( !_interactable ) _containerUI.transform.gameObject.SetActive( false );

            // Prepare OpenCV.
            _noDistCoeffs         = new MatOfDouble(new double[] { 0, 0, 0, 0 });
            _rVec                 = new Mat();
            _tVec                 = new Mat();
            _anchorPointsImage    = new Point[pointCount];
            _anchorPointsWorld    = new Point3[pointCount];
            _anchorPointsImageMat = new MatOfPoint2f();
            _anchorPointsWorldMat = new MatOfPoint3f();
            _anchorPointsImageMat.alloc(pointCount);
            _anchorPointsWorldMat.alloc(pointCount);
            for (int p = 0; p < pointCount; p++)
            {
                _anchorPointsImage[p] = new Point();
                _anchorPointsWorld[p] = new Point3();
            }

            // Load files.
            if (!Intrinsics.TryLoadFromFile(_intrinsicsFileName, out _intrinsics))
            {
                enabled = false;
                return;
            }
            LoadCircleAnchorPoints();

            // Update variables.
            if (!Application.isEditor)
            {
                OnValidate();
            }
        }
        /// <summary>
        /// Raises the web cam texture to mat helper inited event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInited()
        {
            Debug.Log("OnWebCamTextureToMatHelperInited");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

            colors  = new Color32[webCamTextureMat.cols() * webCamTextureMat.rows()];
            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);



            gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);

            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = gameObject.transform.localScale.x;
            float height = gameObject.transform.localScale.y;

            float imageScale  = 1.0f;
            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageScale = (float)Screen.height / (float)Screen.width;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;


            //set camera param
            int max_d = Mathf.Max(webCamTextureMat.rows(), webCamTextureMat.cols());

            camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, max_d);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, webCamTextureMat.cols() / 2.0f);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, max_d);
            camMatrix.put(1, 2, webCamTextureMat.rows() / 2.0f);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());

            distCoeffs = new MatOfDouble(0, 0, 0, 0);
            Debug.Log("distCoeffs " + distCoeffs.dump());

            //calibration camera
            Size   imageSize      = new Size(webCamTextureMat.cols() * imageScale, webCamTextureMat.rows() * imageScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point();

            double[] aspectratio = new double[1];


            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);

            //Adjust Unity Camera FOV
            if (widthScale < heightScale)
            {
                ARCamera.fieldOfView = (float)fovx [0];
            }
            else
            {
                ARCamera.fieldOfView = (float)fovy [0];
            }



            MarkerDesign[] markerDesigns = new MarkerDesign[markerSettings.Length];
            for (int i = 0; i < markerDesigns.Length; i++)
            {
                markerDesigns [i] = markerSettings [i].markerDesign;
            }

            markerDetector = new MarkerDetector(camMatrix, distCoeffs, markerDesigns);



            invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
            Debug.Log("invertYM " + invertYM.ToString());

            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
            Debug.Log("invertZM " + invertZM.ToString());


            //if the WebCamera is front-facing, flip the Mat.
            if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing)
            {
                webCamTextureToMatHelper.flipHorizontal = true;
            }
        }
Пример #45
0
        /// <summary>
        /// Binarizes by Nick's method
        /// </summary>
        /// <param name="src">Input image</param>
        /// <param name="dst">Output image</param>
        /// <param name="kernelSize">Window size</param>
        /// <param name="k">Adequate coefficient</param>
        public static void Nick(Mat src, Mat dst, int kernelSize, double k)
        {
            if (src == null)
            {
                throw new ArgumentNullException("src");
            }
            if (dst == null)
            {
                throw new ArgumentNullException("dst");
            }

            // Grayscale images only.
            if (src.Type() != MatType.CV_8UC1)
            {
                throw new ArgumentException("src must be gray scale image");
            }
            if (dst.Type() != MatType.CV_8UC1)
            {
                throw new ArgumentException("dst must be gray scale image");
            }

            // Validate the kernel size.
            if (kernelSize < 3)
            {
                throw new ArgumentOutOfRangeException("kernelSize", "size must be 3 and above");
            }
            if (kernelSize % 2 == 0)
            {
                throw new ArgumentOutOfRangeException("kernelSize", "size must be odd number");
            }

            int borderSize = kernelSize / 2;
            int width      = src.Width;
            int height     = src.Height;

            dst.Create(src.Size(), src.Type());

            using (var tempMat = new Mat(height + (borderSize * 2), width + (borderSize * 2), src.Type()))
            using (var sumMat = new Mat(tempMat.Height + 1, tempMat.Width + 1, MatType.CV_64FC1, 1))
            using (var sqSumMat = new Mat(tempMat.Height + 1, tempMat.Width + 1, MatType.CV_64FC1, 1))
            {
                // Pad the source so the window fits at the borders, then build integral images.
                Cv2.CopyMakeBorder(src, tempMat, borderSize, borderSize, borderSize, borderSize, BorderTypes.Replicate, Scalar.All(0));
                Cv2.Integral(tempMat, sumMat, sqSumMat);

                using (var tSrcMat = new MatOfByte(src))
                using (var tDstMat = new MatOfByte(dst))
                using (var tSumMat = new MatOfDouble(sumMat))
                using (var tSqSumMat = new MatOfDouble(sqSumMat))
                {
                    var tSrc   = tSrcMat.GetIndexer();
                    var tDst   = tDstMat.GetIndexer();
                    var tSum   = tSumMat.GetIndexer();
                    var tSqSum = tSqSumMat.GetIndexer();

                    int ylim         = height + borderSize;
                    int xlim         = width + borderSize;
                    int kernelPixels = kernelSize * kernelSize;
                    for (int y = borderSize; y < ylim; y++)
                    {
                        for (int x = borderSize; x < xlim; x++)
                        {
                            int x1 = x - borderSize;
                            int y1 = y - borderSize;
                            int x2 = x + borderSize + 1;
                            int y2 = y + borderSize + 1;
                            // Window sum and squared sum in O(1) via the integral images.
                            double sum   = tSum[y2, x2] - tSum[y2, x1] - tSum[y1, x2] + tSum[y1, x1];
                            double sqsum = tSqSum[y2, x2] - tSqSum[y2, x1] - tSqSum[y1, x2] + tSqSum[y1, x1];
                            double mean  = sum / kernelPixels;
                            // Nick's threshold: T = m + k * sqrt((sum of squares - m^2) / NP).
                            double term = (sqsum - mean * mean) / kernelPixels;
                            if (term < 0.0)
                            {
                                term = 0.0;
                            }
                            term = Math.Sqrt(term);

                            double threshold = mean + k * term;
                            if (tSrc[y - borderSize, x - borderSize] < threshold)
                            {
                                tDst[y - borderSize, x - borderSize] = 0;
                            }
                            else
                            {
                                tDst[y - borderSize, x - borderSize] = 255;
                            }
                        }
                    }
                }
            }
        }
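
A minimal usage sketch for the binarizer above, assuming it is exposed as Binarizer.Nick (as in OpenCvSharp's extension classes); kernelSize must be odd and at least 3:

        // Sketch: binarize a grayscale image with Nick's method.
        using (var src = Cv2.ImRead("input.png", ImreadModes.Grayscale))
        using (var dst = new Mat())
        {
            // k is typically chosen around -0.2 .. -0.1; a more negative k lowers
            // the threshold, marking fewer pixels as black.
            Binarizer.Nick(src, dst, 51, -0.2);
            Cv2.ImWrite("binarized.png", dst);
        }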
Пример #46
0
        /// <summary>
        /// Raises the web cam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat   rawSizeMat    = webCamTextureToMatHelper.GetMat();
            float rawSizeWidth  = rawSizeMat.width();
            float rawSizeHeight = rawSizeMat.height();

            Mat webCamTextureMat = imageOptimizationHelper.GetDownScaleMat(rawSizeMat);

            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = webCamTextureMat.width();
            float height = webCamTextureMat.height();

            texture = new Texture2D((int)width, (int)height, TextureFormat.RGB24, false);

            previewQuad.GetComponent <MeshRenderer>().material.mainTexture = texture;
            previewQuad.transform.localScale = new Vector3(1, height / width, 1);
            previewQuad.SetActive(displayCameraPreview);


            // set camera parameters.
            double fx;
            double fy;
            double cx;
            double cy;

            string loadDirectoryPath               = Path.Combine(Application.persistentDataPath, "HoloLensArUcoCameraCalibrationExample");
            string calibratonDirectoryName         = "camera_parameters" + rawSizeWidth + "x" + rawSizeHeight;
            string loadCalibratonFileDirectoryPath = Path.Combine(loadDirectoryPath, calibratonDirectoryName);
            string loadPath = Path.Combine(loadCalibratonFileDirectoryPath, calibratonDirectoryName + ".xml");

            if (useStoredCameraParameters && File.Exists(loadPath))
            {
                CameraParameters param;
                XmlSerializer    serializer = new XmlSerializer(typeof(CameraParameters));
                using (var stream = new FileStream(loadPath, FileMode.Open)) {
                    param = (CameraParameters)serializer.Deserialize(stream);
                }

                fx = param.camera_matrix[0];
                fy = param.camera_matrix[4];
                cx = param.camera_matrix[2] / imageOptimizationHelper.downscaleRatio;
                cy = param.camera_matrix[5] / imageOptimizationHelper.downscaleRatio;

                camMatrix = new Mat(3, 3, CvType.CV_64FC1);
                camMatrix.put(0, 0, fx);
                camMatrix.put(0, 1, 0);
                camMatrix.put(0, 2, cx);
                camMatrix.put(1, 0, 0);
                camMatrix.put(1, 1, fy);
                camMatrix.put(1, 2, cy);
                camMatrix.put(2, 0, 0);
                camMatrix.put(2, 1, 0);
                camMatrix.put(2, 2, 1.0f);

                distCoeffs = new MatOfDouble(param.GetDistortionCoefficients());

                Debug.Log("Loaded CameraParameters from a stored XML file.");
                Debug.Log("loadPath: " + loadPath);
            }
            else
            {
                fx = this.fx;
                fy = this.fy;
                cx = this.cx / imageOptimizationHelper.downscaleRatio;
                cy = this.cy / imageOptimizationHelper.downscaleRatio;

                camMatrix = new Mat(3, 3, CvType.CV_64FC1);
                camMatrix.put(0, 0, fx);
                camMatrix.put(0, 1, 0);
                camMatrix.put(0, 2, cx);
                camMatrix.put(1, 0, 0);
                camMatrix.put(1, 1, fy);
                camMatrix.put(1, 2, cy);
                camMatrix.put(2, 0, 0);
                camMatrix.put(2, 1, 0);
                camMatrix.put(2, 2, 1.0f);

                distCoeffs = new MatOfDouble(this.distCoeffs1, this.distCoeffs2, this.distCoeffs3, this.distCoeffs4, this.distCoeffs5);

                Debug.Log("Created a dummy CameraParameters.");
            }

            Debug.Log("camMatrix " + camMatrix.dump());
            Debug.Log("distCoeffs " + distCoeffs.dump());


            //Calibration camera
            Size   imageSize      = new Size(width, height);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);

            // Display objects near the camera.
            arCamera.nearClipPlane = 0.01f;

            grayMat         = new Mat();
            ids             = new Mat();
            corners         = new List <Mat> ();
            rejectedCorners = new List <Mat> ();
            rvecs           = new Mat();
            tvecs           = new Mat();
            rotMat          = new Mat(3, 3, CvType.CV_64FC1);


            transformationM = new Matrix4x4();

            invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
            Debug.Log("invertYM " + invertYM.ToString());

            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
            Debug.Log("invertZM " + invertZM.ToString());

            detectorParams = DetectorParameters.create();
            dictionary     = Aruco.getPredefinedDictionary(Aruco.DICT_6X6_250);


            //If the WebCamera is front-facing, flip the Mat.
            if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing)
            {
                webCamTextureToMatHelper.flipHorizontal = true;
            }

            downScaleFrameMat = new Mat((int)height, (int)width, CvType.CV_8UC4);
            rgbMat4preview    = new Mat();
        }
				private IEnumerator init ()
				{
						axes.SetActive (false);
						head.SetActive (false);
						rightEye.SetActive (false);
						leftEye.SetActive (false);
						mouth.SetActive (false);
			

						if (webCamTexture != null) {
								faceTracker.reset ();
								
								webCamTexture.Stop ();
								initDone = false;
				
								rgbaMat.Dispose ();
								grayMat.Dispose ();
								cascade.Dispose ();
								camMatrix.Dispose ();
								distCoeffs.Dispose ();

						}
			
						// Checks how many and which cameras are available on the device
						for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
				
								if (WebCamTexture.devices [cameraIndex].isFrontFacing == shouldUseFrontFacing) {
					
										Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

										webCamDevice = WebCamTexture.devices [cameraIndex];

										webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

										break;
								}
						}
			
						if (webCamTexture == null) {
								webCamDevice = WebCamTexture.devices [0];
								webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
						}
			
						Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
			
			
			
						// Starts the camera
						webCamTexture.Play ();


						while (true) {
								//If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
								#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
				                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
								#else
								if (webCamTexture.didUpdateThisFrame) {
										#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2                                    
										while (webCamTexture.width <= 16) {
												webCamTexture.GetPixels32 ();
												yield return new WaitForEndOfFrame ();
										} 
										#endif
								#endif
										Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
										Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);
					
										colors = new Color32[webCamTexture.width * webCamTexture.height];
					
										rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
										grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
					
										texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

										gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

										updateLayout ();

										cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
										if (cascade.empty ()) {
												Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
										}


										int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
										camMatrix = new Mat (3, 3, CvType.CV_64FC1);
										camMatrix.put (0, 0, max_d);
										camMatrix.put (0, 1, 0);
										camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
										camMatrix.put (1, 0, 0);
										camMatrix.put (1, 1, max_d);
										camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
										camMatrix.put (2, 0, 0);
										camMatrix.put (2, 1, 0);
										camMatrix.put (2, 2, 1.0f);
					
										Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
										double apertureWidth = 0;
										double apertureHeight = 0;
										double[] fovx = new double[1];
										double[] fovy = new double[1];
										double[] focalLength = new double[1];
										Point principalPoint = new Point ();
										double[] aspectratio = new double[1];
					
					
					
					
										Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
					
										Debug.Log ("imageSize " + imageSize.ToString ());
										Debug.Log ("apertureWidth " + apertureWidth);
										Debug.Log ("apertureHeight " + apertureHeight);
										Debug.Log ("fovx " + fovx [0]);
										Debug.Log ("fovy " + fovy [0]);
										Debug.Log ("focalLength " + focalLength [0]);
										Debug.Log ("principalPoint " + principalPoint.ToString ());
										Debug.Log ("aspectratio " + aspectratio [0]);
					
					
										ARCamera.fieldOfView = (float)fovy [0];
					
										Debug.Log ("camMatrix " + camMatrix.dump ());
					
					
										distCoeffs = new MatOfDouble (0, 0, 0, 0);
										Debug.Log ("distCoeffs " + distCoeffs.dump ());
					
					
					
										lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
										Debug.Log ("lookAt " + lookAtM.ToString ());
					
										invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));


										screenOrientation = Screen.orientation;
										initDone = true;
					
										break;
								} else {
										yield return 0;
								}
						}
				}