// Use this for initialization
void Start ()
{
    // srcMat
    Texture2D srcTexture = Resources.Load ("matchshapes") as Texture2D;
    Mat srcMat = new Mat (srcTexture.height, srcTexture.width, CvType.CV_8UC1);
    Utils.texture2DToMat (srcTexture, srcMat);
    Debug.Log ("srcMat.ToString() " + srcMat.ToString ());
    Imgproc.threshold (srcMat, srcMat, 127, 255, Imgproc.THRESH_BINARY);

    // dstMat
    Texture2D dstTexture = Resources.Load ("matchshapes") as Texture2D;
    Mat dstMat = new Mat (dstTexture.height, dstTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat (dstTexture, dstMat);
    Debug.Log ("dstMat.ToString() " + dstMat.ToString ());

    List<MatOfPoint> srcContours = new List<MatOfPoint> ();
    Mat srcHierarchy = new Mat ();

    // Find srcContours
    Imgproc.findContours (srcMat, srcContours, srcHierarchy, Imgproc.RETR_CCOMP, Imgproc.CHAIN_APPROX_NONE);
    Debug.Log ("srcContours.Count " + srcContours.Count);

    for (int i = 0; i < srcContours.Count; i++) {
        Imgproc.drawContours (dstMat, srcContours, i, new Scalar (255, 0, 0), 2, 8, srcHierarchy, 0, new Point ());
    }

    for (int i = 0; i < srcContours.Count; i++) {
        double returnVal = Imgproc.matchShapes (srcContours [1], srcContours [i], Imgproc.CV_CONTOURS_MATCH_I1, 0);
        Debug.Log ("returnVal " + i + " " + returnVal);

        Point point = new Point ();
        float[] radius = new float[1];
        Imgproc.minEnclosingCircle (new MatOfPoint2f (srcContours [i].toArray ()), point, radius);
        Debug.Log ("point.ToString() " + point.ToString ());
        Debug.Log ("radius.ToString() " + radius [0]);

        Imgproc.circle (dstMat, point, 5, new Scalar (0, 0, 255), -1);
        Imgproc.putText (dstMat, " " + returnVal, point, Core.FONT_HERSHEY_SIMPLEX, 0.4, new Scalar (0, 255, 0), 1, Imgproc.LINE_AA, false);
    }

    Texture2D texture = new Texture2D (dstMat.cols (), dstMat.rows (), TextureFormat.RGBA32, false);
    Utils.matToTexture2D (dstMat, texture);
    gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
}
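For reference, a smaller matchShapes return value means a closer match. With CV_CONTOURS_MATCH_I1 the comparison is based on the Hu moments h_i of the two contours; stated here from the OpenCV documentation as a reminder, not taken from the sample itself:

I_1(A, B) = \sum_{i=1}^{7} \left| \frac{1}{m_i^A} - \frac{1}{m_i^B} \right|, \qquad m_i^A = \operatorname{sign}(h_i^A) \, \log\lvert h_i^A \rvert, \quad m_i^B = \operatorname{sign}(h_i^B) \, \log\lvert h_i^B \rvert

So the contour at index 1, used as the reference above, should report a value of 0 against itself and small values for similarly shaped contours.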
/// <summary>
/// Init this instance.
/// </summary>
private IEnumerator init ()
{
    if (webCamTexture != null) {
        webCamTexture.Stop ();
        initDone = false;
        rgbaMat.Dispose ();
    }

    // Checks how many and which cameras are available on the device
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {
            Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
            webCamDevice = WebCamTexture.devices [cameraIndex];
            webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
            break;
        }
    }

    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices [0];
        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
    }

    Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Starts the camera
    webCamTexture.Play ();

    while (true) {
        // If you want to use webCamTexture.width and webCamTexture.height on iOS, you have to wait until webCamTexture.didUpdateThisFrame == 1;
        // otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
        #else
        if (webCamTexture.didUpdateThisFrame) {
        #endif

            Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing);

            colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            gameObject.transform.localEulerAngles = new Vector3 (0, 0, 0);
            // gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);
            gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

            // bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            // float scaleX = 1;
            // float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
            // gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

            Camera.main.orthographicSize = webCamTexture.height / 2;

            // set camera parameters
            int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
            camMatrix = new Mat (3, 3, CvType.CV_64FC1);
            camMatrix.put (0, 0, max_d); camMatrix.put (0, 1, 0); camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
            camMatrix.put (1, 0, 0); camMatrix.put (1, 1, max_d); camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
            camMatrix.put (2, 0, 0); camMatrix.put (2, 1, 0); camMatrix.put (2, 2, 1.0f);
            Debug.Log ("camMatrix " + camMatrix.dump ());

            distCoeffs = new MatOfDouble (0, 0, 0, 0);
            Debug.Log ("distCoeffs " + distCoeffs.dump ());

            // camera calibration values
            Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
            double apertureWidth = 0;
            double apertureHeight = 0;
            double[] fovx = new double[1];
            double[] fovy = new double[1];
            double[] focalLength = new double[1];
            Point principalPoint = new Point ();
            double[] aspectratio = new double[1];
            Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log ("imageSize " + imageSize.ToString ());
            Debug.Log ("apertureWidth " + apertureWidth);
            Debug.Log ("apertureHeight " + apertureHeight);
            Debug.Log ("fovx " + fovx [0]);
            Debug.Log ("fovy " + fovy [0]);
            Debug.Log ("focalLength " + focalLength [0]);
            Debug.Log ("principalPoint " + principalPoint.ToString ());
            Debug.Log ("aspectratio " + aspectratio [0]);

            // Adjust Unity Camera FOV
            for (int i = 0; i < ARCamera.Length; i++) {
                ARCamera [i].fieldOfView = (float)fovy [0];
            }

            markerDetector = new MarkerDetector (camMatrix, distCoeffs);

            // Marker Coordinate Initial Matrix
            lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
            Debug.Log ("lookAt " + lookAtM.ToString ());

            // OpenGL to Unity Coordinate System Convert Matrix
            // http://docs.unity3d.com/ScriptReference/Camera-worldToCameraMatrix.html: camera space matches the OpenGL convention,
            // where the camera's forward is the negative Z axis. This differs from Unity's convention, where forward is the positive Z axis.
            invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
            Debug.Log ("invertZM " + invertZM.ToString ());

            initDone = true;
            break;
        } else {
            yield return 0;
        }
    }
}

// Update is called once per frame
void Update ()
{
    if (!initDone)
        return;

    #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
    #else
    if (webCamTexture.didUpdateThisFrame) {
    #endif

        Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

        // flip to correct direction.
        if (webCamTexture.videoVerticallyMirrored) {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip (rgbaMat, rgbaMat, -1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 1);
                }
            }
        } else {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip (rgbaMat, rgbaMat, 1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, -1);
                }
            }
        }

        markerDetector.processFrame (rgbaMat, 1);
        // Debug.Log ("markerDetector.getTransformations ().Count " + markerDetector.getTransformations ().Count);

        for (int i = 0; i < ARCamera.Length; i++) {
            ARCamera [i].gameObject.SetActive (false);
        }

        int markerCount = markerDetector.getTransformations ().Count;
        for (int i = 0; i < markerCount; i++) {
            if (i > ARCamera.Length - 1)
                break;

            ARCamera [i].gameObject.SetActive (true);

            // Marker to Camera Coordinate System Convert Matrix
            transformationM = markerDetector.getTransformations () [i];
            // Debug.Log ("transformationM " + transformationM.ToString ());

            worldToCameraM = lookAtM * transformationM * invertZM;
            // Debug.Log ("worldToCameraM " + worldToCameraM.ToString ());

            ARCamera [i].worldToCameraMatrix = worldToCameraM;
        }

        Utils.matToTexture2D (rgbaMat, texture, colors);
        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
    }
}

void OnDisable ()
{
    webCamTexture.Stop ();
}

/// <summary>
/// Gets the look at matrix.
/// </summary>
/// <returns>The look at matrix.</returns>
/// <param name="pos">Position.</param>
/// <param name="target">Target.</param>
/// <param name="up">Up.</param>
private Matrix4x4 getLookAtMatrix (Vector3 pos, Vector3 target, Vector3 up)
{
    Vector3 z = Vector3.Normalize (pos - target);
    Vector3 x = Vector3.Normalize (Vector3.Cross (up, z));
    Vector3 y = Vector3.Normalize (Vector3.Cross (z, x));

    Matrix4x4 result = new Matrix4x4 ();
    result.SetRow (0, new Vector4 (x.x, x.y, x.z, -(Vector3.Dot (pos, x))));
    result.SetRow (1, new Vector4 (y.x, y.y, y.z, -(Vector3.Dot (pos, y))));
    result.SetRow (2, new Vector4 (z.x, z.y, z.z, -(Vector3.Dot (pos, z))));
    result.SetRow (3, new Vector4 (0, 0, 0, 1));

    return result;
}

void OnGUI ()
{
    float screenScale = Screen.height / 240.0f;
    Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
    GUI.matrix = scaledMatrix;

    GUILayout.BeginVertical ();
    if (GUILayout.Button ("back")) {
        Application.LoadLevel ("MarkerBasedARSample");
    }
    if (GUILayout.Button ("change camera")) {
        isFrontFacing = !isFrontFacing;
        StartCoroutine (init ());
    }
    GUILayout.EndVertical ();
}
}
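The matrix chain used in Update above can be summarized as follows: lookAtM expresses world coordinates in the marker's initial frame, transformationM is the marker-to-camera pose estimated by MarkerDetector, and the final scale matrix flips Z to move from the OpenGL camera convention to Unity's worldToCameraMatrix convention. This is only a restatement of the code, not an additional derivation:

M_{\text{worldToCamera}} = M_{\text{lookAt}} \cdot M_{\text{marker} \to \text{camera}} \cdot \operatorname{diag}(1,\, 1,\, -1,\, 1)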
// Use this for initialization
void Start ()
{
    gameObject.transform.localScale = new Vector3 (imgTexture.width, imgTexture.height, 1);
    Camera.main.orthographicSize = imgTexture.height / 2;

    Mat imgMat = new Mat (imgTexture.height, imgTexture.width, CvType.CV_8UC4);
    Utils.texture2DToMat (imgTexture, imgMat);
    Debug.Log ("imgMat dst ToString " + imgMat.ToString ());

    // set camera parameters
    int max_d = Mathf.Max (imgMat.rows (), imgMat.cols ());
    Mat camMatrix = new Mat (3, 3, CvType.CV_64FC1);
    camMatrix.put (0, 0, max_d); camMatrix.put (0, 1, 0); camMatrix.put (0, 2, imgMat.cols () / 2.0f);
    camMatrix.put (1, 0, 0); camMatrix.put (1, 1, max_d); camMatrix.put (1, 2, imgMat.rows () / 2.0f);
    camMatrix.put (2, 0, 0); camMatrix.put (2, 1, 0); camMatrix.put (2, 2, 1.0f);
    Debug.Log ("camMatrix " + camMatrix.dump ());

    MatOfDouble distCoeffs = new MatOfDouble (0, 0, 0, 0);
    Debug.Log ("distCoeffs " + distCoeffs.dump ());

    // camera calibration values
    Size imageSize = new Size (imgMat.cols (), imgMat.rows ());
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point ();
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

    Debug.Log ("imageSize " + imageSize.ToString ());
    Debug.Log ("apertureWidth " + apertureWidth);
    Debug.Log ("apertureHeight " + apertureHeight);
    Debug.Log ("fovx " + fovx [0]);
    Debug.Log ("fovy " + fovy [0]);
    Debug.Log ("focalLength " + focalLength [0]);
    Debug.Log ("principalPoint " + principalPoint.ToString ());
    Debug.Log ("aspectratio " + aspectratio [0]);

    // Adjust Unity Camera FOV
    ARCamera.fieldOfView = (float)fovy [0];

    // ARCamera.projectionMatrix = ARCamera.projectionMatrix * Matrix4x4.Scale (new Vector3 (-1, -1, 1));
    // gameObject.transform.localScale = new Vector3 (-1 * gameObject.transform.localScale.x, -1 * gameObject.transform.localScale.y, 1);

    MarkerDetector markerDetector = new MarkerDetector (camMatrix, distCoeffs, markerDesign);
    markerDetector.processFrame (imgMat, 1);

    // Marker Coordinate Initial Matrix
    Matrix4x4 lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
    Debug.Log ("lookAt " + lookAtM.ToString ());

    // Marker to Camera Coordinate System Convert Matrix
    if (markerDetector.getTransformations ().Count > 0) {
        Matrix4x4 transformationM = markerDetector.getTransformations () [0];
        Debug.Log ("transformationM " + transformationM.ToString ());

        // OpenGL to Unity Coordinate System Convert Matrix
        // http://docs.unity3d.com/ScriptReference/Camera-worldToCameraMatrix.html: camera space matches the OpenGL convention
        // (the camera's forward is the negative Z axis), which differs from Unity's convention, where forward is the positive Z axis.
        Matrix4x4 invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
        Debug.Log ("invertZM " + invertZM.ToString ());

        Matrix4x4 worldToCameraM = lookAtM * transformationM * invertZM;
        Debug.Log ("worldToCameraM " + worldToCameraM.ToString ());

        ARCamera.worldToCameraMatrix = worldToCameraM;
    } else {
        Debug.LogWarning ("Marker is not detected");
    }

    Texture2D texture = new Texture2D (imgMat.cols (), imgMat.rows (), TextureFormat.RGBA32, false);
    Utils.matToTexture2D (imgMat, texture);
    gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
}
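The hand-built camMatrix above (and in the other samples in this section) is the usual pinhole intrinsic matrix with an assumed focal length equal to the larger image dimension and the principal point at the image center. This is a rough guess that works for these samples rather than a real calibration:

K = \begin{bmatrix} f & 0 & c_x \\ 0 & f & c_y \\ 0 & 0 & 1 \end{bmatrix}, \qquad f = \max(w, h), \quad c_x = \frac{w}{2}, \quad c_y = \frac{h}{2}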
private IEnumerator init ()
{
    axes.SetActive (false);
    head.SetActive (false);
    rightEye.SetActive (false);
    leftEye.SetActive (false);
    mouth.SetActive (false);

    if (webCamTexture != null) {
        faceTracker.reset ();
        webCamTexture.Stop ();
        initDone = false;
        rgbaMat.Dispose ();
        grayMat.Dispose ();
        cascade.Dispose ();
        camMatrix.Dispose ();
        distCoeffs.Dispose ();
    }

    // Checks how many and which cameras are available on the device
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {
            Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
            webCamDevice = WebCamTexture.devices [cameraIndex];
            webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
            break;
        }
    }

    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices [0];
        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
    }

    Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Starts the camera
    webCamTexture.Play ();

    while (true) {
        // If you want to use webCamTexture.width and webCamTexture.height on iOS, you have to wait until webCamTexture.didUpdateThisFrame == 1;
        // otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
        #else
        if (webCamTexture.didUpdateThisFrame) {
        #endif

            Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing);

            colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
            texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
            if (cascade.empty ()) {
                Debug.LogError ("cascade file is not loaded. Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
            }

            gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);
            gameObject.transform.localEulerAngles = new Vector3 (0, 0, 0);
            // gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

            // bool _videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            // float scaleX = 1;
            // float scaleY = _videoVerticallyMirrored ? -1.0f : 1.0f;
            // gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

            Camera.main.orthographicSize = webCamTexture.height / 2;

            int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
            camMatrix = new Mat (3, 3, CvType.CV_64FC1);
            camMatrix.put (0, 0, max_d); camMatrix.put (0, 1, 0); camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
            camMatrix.put (1, 0, 0); camMatrix.put (1, 1, max_d); camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
            camMatrix.put (2, 0, 0); camMatrix.put (2, 1, 0); camMatrix.put (2, 2, 1.0f);

            Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
            double apertureWidth = 0;
            double apertureHeight = 0;
            double[] fovx = new double[1];
            double[] fovy = new double[1];
            double[] focalLength = new double[1];
            Point principalPoint = new Point ();
            double[] aspectratio = new double[1];
            Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log ("imageSize " + imageSize.ToString ());
            Debug.Log ("apertureWidth " + apertureWidth);
            Debug.Log ("apertureHeight " + apertureHeight);
            Debug.Log ("fovx " + fovx [0]);
            Debug.Log ("fovy " + fovy [0]);
            Debug.Log ("focalLength " + focalLength [0]);
            Debug.Log ("principalPoint " + principalPoint.ToString ());
            Debug.Log ("aspectratio " + aspectratio [0]);

            ARCamera.fieldOfView = (float)fovy [0];

            Debug.Log ("camMatrix " + camMatrix.dump ());

            distCoeffs = new MatOfDouble (0, 0, 0, 0);
            Debug.Log ("distCoeffs " + distCoeffs.dump ());

            lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
            Debug.Log ("lookAt " + lookAtM.ToString ());

            invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));

            initDone = true;
            break;
        } else {
            yield return 0;
        }
    }
}

// Update is called once per frame
void Update ()
{
    if (!initDone)
        return;

    #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
    #else
    if (webCamTexture.didUpdateThisFrame) {
    #endif

        Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

        // flip to correct direction.
        if (webCamTexture.videoVerticallyMirrored) {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip (rgbaMat, rgbaMat, -1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 1);
                }
            }
        } else {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip (rgbaMat, rgbaMat, 1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip (rgbaMat, rgbaMat, -1);
                }
            }
        }

        // convert image to grayscale
        Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        if (faceTracker.getPoints ().Count <= 0) {
            Debug.Log ("detectFace");

            // equalize the histogram and detect faces
            using (Mat equalizeHistMat = new Mat ())
            using (MatOfRect faces = new MatOfRect ()) {
                Imgproc.equalizeHist (grayMat, equalizeHistMat);
                cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());

                if (faces.rows () > 0) {
                    Debug.Log ("faces " + faces.dump ());

                    // add initial face points from MatOfRect
                    faceTracker.addPoints (faces);

                    // draw face rect
                    OpenCVForUnity.Rect[] rects = faces.toArray ();
                    for (int i = 0; i < rects.Length; i++) {
                        Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
                    }
                }
            }
        }

        // track face points. If face points <= 0, this always returns false.
        if (faceTracker.track (grayMat, faceTrackerParams)) {
            if (isDrawPoints)
                faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));

            Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

            Point[] points = faceTracker.getPoints () [0];

            if (points.Length > 0) {
                // for (int i = 0; i < points.Length; i++) {
                //     Core.putText (rgbaMat, "" + i, new Point (points [i].x, points [i].y), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar (0, 0, 255, 255), 2, Core.LINE_AA, false);
                // }

                imagePoints.fromArray (
                    points [31], // l eye
                    points [36], // r eye
                    points [67], // nose
                    points [48], // l mouth
                    points [54]  // r mouth
                    // , points [1],  // l ear
                    // points [13]    // r ear
                );

                Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                bool isRefresh = false;

                if (tvec.get (2, 0) [0] > 0 && tvec.get (2, 0) [0] < 1200 * ((float)webCamTexture.width / (float)width)) {
                    isRefresh = true;

                    if (oldRvec == null) {
                        oldRvec = new Mat ();
                        rvec.copyTo (oldRvec);
                    }
                    if (oldTvec == null) {
                        oldTvec = new Mat ();
                        tvec.copyTo (oldTvec);
                    }

                    // filter rvec noise.
                    using (Mat absDiffRvec = new Mat ()) {
                        Core.absdiff (rvec, oldRvec, absDiffRvec);
                        // Debug.Log ("absDiffRvec " + absDiffRvec.dump ());
                        using (Mat cmpRvec = new Mat ()) {
                            Core.compare (absDiffRvec, new Scalar (rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);
                            if (Core.countNonZero (cmpRvec) > 0)
                                isRefresh = false;
                        }
                    }

                    // filter tvec noise.
                    using (Mat absDiffTvec = new Mat ()) {
                        Core.absdiff (tvec, oldTvec, absDiffTvec);
                        // Debug.Log ("absDiffTvec " + absDiffTvec.dump ());
                        using (Mat cmpTvec = new Mat ()) {
                            Core.compare (absDiffTvec, new Scalar (tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);
                            if (Core.countNonZero (cmpTvec) > 0)
                                isRefresh = false;
                        }
                    }
                }

                if (isRefresh) {
                    if (!rightEye.activeSelf)
                        rightEye.SetActive (true);
                    if (!leftEye.activeSelf)
                        leftEye.SetActive (true);

                    if ((Mathf.Abs ((float)(points [48].x - points [56].x)) < Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.2
                        && Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.9)
                        || Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.7) {
                        if (!mouth.activeSelf)
                            mouth.SetActive (true);
                    } else {
                        if (mouth.activeSelf)
                            mouth.SetActive (false);
                    }

                    rvec.copyTo (oldRvec);
                    tvec.copyTo (oldTvec);

                    Calib3d.Rodrigues (rvec, rotM);

                    transformationM.SetRow (0, new Vector4 ((float)rotM.get (0, 0) [0], (float)rotM.get (0, 1) [0], (float)rotM.get (0, 2) [0], (float)tvec.get (0, 0) [0]));
                    transformationM.SetRow (1, new Vector4 ((float)rotM.get (1, 0) [0], (float)rotM.get (1, 1) [0], (float)rotM.get (1, 2) [0], (float)tvec.get (1, 0) [0]));
                    transformationM.SetRow (2, new Vector4 ((float)rotM.get (2, 0) [0], (float)rotM.get (2, 1) [0], (float)rotM.get (2, 2) [0], (float)tvec.get (2, 0) [0]));
                    transformationM.SetRow (3, new Vector4 (0, 0, 0, 1));

                    modelViewMtrx = lookAtM * transformationM * invertZM;
                    ARCamera.worldToCameraMatrix = modelViewMtrx;
                    // Debug.Log ("modelViewMtrx " + modelViewMtrx.ToString ());
                }
            }
        }

        Utils.matToTexture2D (rgbaMat, texture, colors);
    }

    if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
        faceTracker.reset ();
        if (oldRvec != null) {
            oldRvec.Dispose ();
            oldRvec = null;
        }
        if (oldTvec != null) {
            oldTvec.Dispose ();
            oldTvec = null;
        }

        ARCamera.ResetWorldToCameraMatrix ();

        rightEye.SetActive (false);
        leftEye.SetActive (false);
        mouth.SetActive (false);
    }
}

void OnDisable ()
{
    webCamTexture.Stop ();
}

private Matrix4x4 getLookAtMatrix (Vector3 pos, Vector3 target, Vector3 up)
{
    Vector3 z = Vector3.Normalize (pos - target);
    Vector3 x = Vector3.Normalize (Vector3.Cross (up, z));
    Vector3 y = Vector3.Normalize (Vector3.Cross (z, x));

    Matrix4x4 result = new Matrix4x4 ();
    result.SetRow (0, new Vector4 (x.x, x.y, x.z, -(Vector3.Dot (pos, x))));
    result.SetRow (1, new Vector4 (y.x, y.y, y.z, -(Vector3.Dot (pos, y))));
    result.SetRow (2, new Vector4 (z.x, z.y, z.z, -(Vector3.Dot (pos, z))));
    result.SetRow (3, new Vector4 (0, 0, 0, 1));

    return result;
}

void OnGUI ()
{
    float screenScale = Screen.height / 240.0f;
    Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
    GUI.matrix = scaledMatrix;

    GUILayout.BeginVertical ();
    if (GUILayout.Button ("back")) {
        Application.LoadLevel ("FaceTrackerSample");
    }
    if (GUILayout.Button ("change camera")) {
        isFrontFacing = !isFrontFacing;
        StartCoroutine (init ());
    }
    if (GUILayout.Button ("drawPoints")) {
        isDrawPoints = !isDrawPoints;
    }
    if (GUILayout.Button ("axes")) {
        axes.SetActive (!axes.activeSelf);
    }
    if (GUILayout.Button ("head")) {
        head.SetActive (!head.activeSelf);
    }
    GUILayout.EndVertical ();
}
}
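The rvec/tvec noise filtering in Update could be factored into a small helper. Below is a minimal sketch, assuming only the OpenCVForUnity Core/Mat/Scalar calls already used above; the class and method names (PoseFilterUtil, isPoseStable) are hypothetical and not part of the sample:

using OpenCVForUnity;

public static class PoseFilterUtil
{
    // Returns true when no element of newVec differs from oldVec by more than maxDelta;
    // mirrors the absdiff / compare / countNonZero pattern used in the sample.
    public static bool isPoseStable (Mat newVec, Mat oldVec, double maxDelta)
    {
        using (Mat absDiff = new Mat ())
        using (Mat cmp = new Mat ()) {
            Core.absdiff (newVec, oldVec, absDiff);
            Core.compare (absDiff, new Scalar (maxDelta), cmp, Core.CMP_GT);
            return Core.countNonZero (cmp) == 0;
        }
    }
}

With such a helper, the two filter blocks above would reduce to: isRefresh = PoseFilterUtil.isPoseStable (rvec, oldRvec, rvecNoiseFilterRange) && PoseFilterUtil.isPoseStable (tvec, oldTvec, tvecNoiseFilterRange).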
/// <summary>
/// Init this instance.
/// </summary>
private IEnumerator init ()
{
    if (webCamTexture != null) {
        webCamTexture.Stop ();
        initDone = false;
        rgbaMat.Dispose ();
    }

    // Checks how many and which cameras are available on the device
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == shouldUseFrontFacing) {
            Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
            webCamDevice = WebCamTexture.devices [cameraIndex];
            webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
            break;
        }
    }

    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices [0];
        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
    }

    Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Starts the camera
    webCamTexture.Play ();

    while (true) {
        // If you want to use webCamTexture.width and webCamTexture.height on iOS, you have to wait until webCamTexture.didUpdateThisFrame == 1;
        // otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
        #else
        if (webCamTexture.didUpdateThisFrame) {
            #if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
            while (webCamTexture.width <= 16) {
                webCamTexture.GetPixels32 ();
                yield return new WaitForEndOfFrame ();
            }
            #endif
        #endif

            Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing);

            colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

            updateLayout ();

            // set camera parameters
            int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
            camMatrix = new Mat (3, 3, CvType.CV_64FC1);
            camMatrix.put (0, 0, max_d); camMatrix.put (0, 1, 0); camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
            camMatrix.put (1, 0, 0); camMatrix.put (1, 1, max_d); camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
            camMatrix.put (2, 0, 0); camMatrix.put (2, 1, 0); camMatrix.put (2, 2, 1.0f);
            Debug.Log ("camMatrix " + camMatrix.dump ());

            distCoeffs = new MatOfDouble (0, 0, 0, 0);
            Debug.Log ("distCoeffs " + distCoeffs.dump ());

            // camera calibration values
            Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
            double apertureWidth = 0;
            double apertureHeight = 0;
            double[] fovx = new double[1];
            double[] fovy = new double[1];
            double[] focalLength = new double[1];
            Point principalPoint = new Point ();
            double[] aspectratio = new double[1];
            Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log ("imageSize " + imageSize.ToString ());
            Debug.Log ("apertureWidth " + apertureWidth);
            Debug.Log ("apertureHeight " + apertureHeight);
            Debug.Log ("fovx " + fovx [0]);
            Debug.Log ("fovy " + fovy [0]);
            Debug.Log ("focalLength " + focalLength [0]);
            Debug.Log ("principalPoint " + principalPoint.ToString ());
            Debug.Log ("aspectratio " + aspectratio [0]);

            // Adjust Unity Camera FOV
            for (int i = 0; i < ARCamera.Length; i++) {
                ARCamera [i].fieldOfView = (float)fovy [0];
            }

            markerDetector = new MarkerDetector (camMatrix, distCoeffs, markerDesign);

            // Marker Coordinate Initial Matrix
            lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
            Debug.Log ("lookAt " + lookAtM.ToString ());

            // OpenGL to Unity Coordinate System Convert Matrix
            // http://docs.unity3d.com/ScriptReference/Camera-worldToCameraMatrix.html: camera space matches the OpenGL convention
            // (the camera's forward is the negative Z axis), which differs from Unity's convention, where forward is the positive Z axis.
            invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
            Debug.Log ("invertZM " + invertZM.ToString ());

            screenOrientation = Screen.orientation;
            initDone = true;
            break;
        } else {
            yield return 0;
        }
    }
}
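Because the assumed intrinsics use a focal length of max(width, height), the vertical FOV reported by calibrationMatrixValues (and copied into ARCamera.fieldOfView above) can also be sanity-checked directly. A minimal sketch; the FovUtil/VerticalFovDeg names are hypothetical and not part of the sample:

using UnityEngine;

public static class FovUtil
{
    // Vertical field of view in degrees for a pinhole camera with focal length fy (pixels)
    // and image height h (pixels): fovy = 2 * atan(h / (2 * fy)).
    public static float VerticalFovDeg (float fy, float imageHeight)
    {
        return 2f * Mathf.Atan (imageHeight / (2f * fy)) * Mathf.Rad2Deg;
    }
}

For a 640x480 frame with fy = 640 this gives roughly 41 degrees, which should agree with the fovy value logged above.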
// Start the initialization process
private IEnumerator init ()
{
    rightEye.SetActive (false);
    leftEye.SetActive (false);

    Debug.Log ("---------------------------------------------------------------Eye");
    Debug.Log (leftEye.transform.localPosition);
    Debug.Log (rightEye.transform.localPosition);
    Debug.Log ("---------------------------------------------------------------Eye");

    if (webCamTexture != null) {
        faceTracker.reset ();
        webCamTexture.Stop ();
        initDone = false;
        rgbaMat.Dispose ();
        grayMat.Dispose ();
        cascade.Dispose ();
        camMatrix.Dispose ();
        distCoeffs.Dispose ();
    }

    // Check which cameras are available on the device
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == shouldUseFrontFacing) {
            Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
            webCamDevice = WebCamTexture.devices [cameraIndex];
            webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
            break;
        }
    }

    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices [0];
        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
    }

    Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Starts the camera
    webCamTexture.Play ();

    while (true) {
        // If you want to use webCamTexture.width and webCamTexture.height on iOS, you have to wait until webCamTexture.didUpdateThisFrame == 1;
        // otherwise these two values will be equal to 16.
        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
        #else
        if (webCamTexture.didUpdateThisFrame) {
            #if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
            while (webCamTexture.width <= 16) {
                webCamTexture.GetPixels32 ();
                yield return new WaitForEndOfFrame ();
            }
            #endif
        #endif

            Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing);

            colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
            texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

            updateLayout ();

            cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
            if (cascade.empty ()) {
                Debug.LogError ("cascade file is not loaded. Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
            }

            int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
            camMatrix = new Mat (3, 3, CvType.CV_64FC1);
            camMatrix.put (0, 0, max_d); camMatrix.put (0, 1, 0); camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
            camMatrix.put (1, 0, 0); camMatrix.put (1, 1, max_d); camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
            camMatrix.put (2, 0, 0); camMatrix.put (2, 1, 0); camMatrix.put (2, 2, 1.0f);

            Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
            double apertureWidth = 0;
            double apertureHeight = 0;
            double[] fovx = new double[1];
            double[] fovy = new double[1];
            double[] focalLength = new double[1];
            Point principalPoint = new Point (); // principal point
            double[] aspectratio = new double[1];
            Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log ("imageSize " + imageSize.ToString ());
            Debug.Log ("apertureWidth " + apertureWidth);
            Debug.Log ("apertureHeight " + apertureHeight);
            Debug.Log ("fovx " + fovx [0]);
            Debug.Log ("fovy " + fovy [0]);
            Debug.Log ("focalLength " + focalLength [0]);
            Debug.Log ("--------------------------principalPoint");
            Debug.Log ("principalPoint " + principalPoint.ToString ());
            Debug.Log ("--------------------------principalPoint");
            Debug.Log ("aspectratio " + aspectratio [0]);

            ARCamera.fieldOfView = (float)fovy [0];

            Debug.Log ("camMatrix " + camMatrix.dump ());

            distCoeffs = new MatOfDouble (0, 0, 0, 0);
            Debug.Log ("distCoeffs " + distCoeffs.dump ());

            lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
            Debug.Log ("lookAt " + lookAtM.ToString ());

            invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));

            screenOrientation = Screen.orientation;
            initDone = true;
            break;
        } else {
            yield return 0;
        }
    }
}
/// <summary>
/// Raises the web cam texture to mat helper inited event.
/// </summary>
public void OnWebCamTextureToMatHelperInited ()
{
    Debug.Log ("OnWebCamTextureToMatHelperInited");

    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();

    colors = new Color32[webCamTextureMat.cols () * webCamTextureMat.rows ()];
    texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);

    gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
    Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    float width = gameObject.transform.localScale.x;
    float height = gameObject.transform.localScale.y;

    float imageScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageScale = (float)Screen.height / (float)Screen.width;
    } else {
        Camera.main.orthographicSize = height / 2;
    }

    gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

    grayMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);

    cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
    if (cascade.empty ()) {
        Debug.LogError ("cascade file is not loaded. Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
    }

    int max_d = Mathf.Max (webCamTextureMat.rows (), webCamTextureMat.cols ());
    camMatrix = new Mat (3, 3, CvType.CV_64FC1);
    camMatrix.put (0, 0, max_d); camMatrix.put (0, 1, 0); camMatrix.put (0, 2, webCamTextureMat.cols () / 2.0f);
    camMatrix.put (1, 0, 0); camMatrix.put (1, 1, max_d); camMatrix.put (1, 2, webCamTextureMat.rows () / 2.0f);
    camMatrix.put (2, 0, 0); camMatrix.put (2, 1, 0); camMatrix.put (2, 2, 1.0f);

    Size imageSize = new Size (webCamTextureMat.cols () * imageScale, webCamTextureMat.rows () * imageScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point ();
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

    Debug.Log ("imageSize " + imageSize.ToString ());
    Debug.Log ("apertureWidth " + apertureWidth);
    Debug.Log ("apertureHeight " + apertureHeight);
    Debug.Log ("fovx " + fovx [0]);
    Debug.Log ("fovy " + fovy [0]);
    Debug.Log ("focalLength " + focalLength [0]);
    Debug.Log ("principalPoint " + principalPoint.ToString ());
    Debug.Log ("aspectratio " + aspectratio [0]);

    if (Screen.height > Screen.width) {
        ARCamera.fieldOfView = (float)fovx [0];
    } else {
        ARCamera.fieldOfView = (float)fovy [0];
    }

    Debug.Log ("camMatrix " + camMatrix.dump ());

    distCoeffs = new MatOfDouble (0, 0, 0, 0);
    Debug.Log ("distCoeffs " + distCoeffs.dump ());

    lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
    Debug.Log ("lookAt " + lookAtM.ToString ());

    invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));

    axes.SetActive (false);
    head.SetActive (false);
    rightEye.SetActive (false);
    leftEye.SetActive (false);
    mouth.SetActive (false);
}
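The widthScale/heightScale branch above letterboxes the camera quad into the screen while preserving its aspect ratio. The same decision can be written as a small pure function; this is only a sketch mirroring that branch, and LayoutUtil/FitOrthographicSize are not part of the helper API:

using UnityEngine;

public static class LayoutUtil
{
    // Returns the orthographic size that fits a quad of the given size into the screen,
    // mirroring the "widthScale < heightScale" branch above.
    public static float FitOrthographicSize (float quadWidth, float quadHeight, float screenWidth, float screenHeight)
    {
        float widthScale = screenWidth / quadWidth;
        float heightScale = screenHeight / quadHeight;
        if (widthScale < heightScale)
            return (quadWidth * screenHeight / screenWidth) / 2f;
        return quadHeight / 2f;
    }
}

Usage would then be: Camera.main.orthographicSize = LayoutUtil.FitOrthographicSize (width, height, Screen.width, Screen.height);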
private IEnumerator init ()
{
    axes.SetActive (false);
    head.SetActive (false);
    rightEye.SetActive (false);
    leftEye.SetActive (false);
    mouth.SetActive (false);

    if (webCamTexture != null) {
        faceTracker.reset ();
        webCamTexture.Stop ();
        initDone = false;
        rgbaMat.Dispose ();
        grayMat.Dispose ();
        cascade.Dispose ();
        camMatrix.Dispose ();
        distCoeffs.Dispose ();
    }

    // Checks how many and which cameras are available on the device
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == shouldUseFrontFacing) {
            Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
            webCamDevice = WebCamTexture.devices [cameraIndex];
            webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
            break;
        }
    }

    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices [0];
        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
    }

    Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Starts the camera
    webCamTexture.Play ();

    while (true) {
        // If you want to use webCamTexture.width and webCamTexture.height on iOS, you have to wait until webCamTexture.didUpdateThisFrame == 1;
        // otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
        #else
        if (webCamTexture.didUpdateThisFrame) {
            #if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
            while (webCamTexture.width <= 16) {
                webCamTexture.GetPixels32 ();
                yield return new WaitForEndOfFrame ();
            }
            #endif
        #endif

            Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing);

            colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
            texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

            updateLayout ();

            cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
            if (cascade.empty ()) {
                Debug.LogError ("cascade file is not loaded. Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
            }

            int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
            camMatrix = new Mat (3, 3, CvType.CV_64FC1);
            camMatrix.put (0, 0, max_d); camMatrix.put (0, 1, 0); camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
            camMatrix.put (1, 0, 0); camMatrix.put (1, 1, max_d); camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
            camMatrix.put (2, 0, 0); camMatrix.put (2, 1, 0); camMatrix.put (2, 2, 1.0f);

            Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
            double apertureWidth = 0;
            double apertureHeight = 0;
            double[] fovx = new double[1];
            double[] fovy = new double[1];
            double[] focalLength = new double[1];
            Point principalPoint = new Point ();
            double[] aspectratio = new double[1];
            Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log ("imageSize " + imageSize.ToString ());
            Debug.Log ("apertureWidth " + apertureWidth);
            Debug.Log ("apertureHeight " + apertureHeight);
            Debug.Log ("fovx " + fovx [0]);
            Debug.Log ("fovy " + fovy [0]);
            Debug.Log ("focalLength " + focalLength [0]);
            Debug.Log ("principalPoint " + principalPoint.ToString ());
            Debug.Log ("aspectratio " + aspectratio [0]);

            ARCamera.fieldOfView = (float)fovy [0];

            Debug.Log ("camMatrix " + camMatrix.dump ());

            distCoeffs = new MatOfDouble (0, 0, 0, 0);
            Debug.Log ("distCoeffs " + distCoeffs.dump ());

            lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
            Debug.Log ("lookAt " + lookAtM.ToString ());

            invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));

            screenOrientation = Screen.orientation;
            initDone = true;
            break;
        } else {
            yield return 0;
        }
    }
}