// Start is called before the first frame update.
//
// BUG FIX: the original declared `void Start()` and spun in `while (true)`
// checking webCamTexture.didUpdateThisFrame. That flag only changes between
// engine frames, so the loop could never observe an update and froze Unity.
// Unity also accepts Start() declared as a coroutine; yielding returns
// control to the engine each frame until the first camera frame arrives.
System.Collections.IEnumerator Start()
{
    // Grab the first available webcam device and start streaming from it.
    var devices = WebCamTexture.devices;
    webCamDevice = devices[0];
    webCamTexture = new WebCamTexture(webCamDevice.name);
    webCamTexture.Play();

    // Wait (non-blocking) for the first real camera frame, then init.
    while (!webCamTexture.didUpdateThisFrame)
    {
        yield return null;
    }
    OnInited();

    // Demo: build a 4-point contour and read individual points back out of a
    // MatOfPoint2f — each get(row, 0) returns a double[2] of {x, y}.
    List<Point> points = new List<Point>();
    points.Add(new Point(0, 260));
    points.Add(new Point(880, 260));
    points.Add(new Point(880, 0));
    points.Add(new Point(0, 0));
    MatOfPoint2f pts2 = new MatOfPoint2f();
    pts2.fromList(points);
    double[] p1_1 = pts2.get(0, 0);
    print(string.Join(" ", p1_1));
    double[] p3_1 = pts2.get(2, 0);
    print(string.Join(" ", p3_1));
    double[] p2_1 = pts2.get(1, 0);
    print(string.Join(" ", p2_1));
}
/// <summary>
/// Reads the point at <paramref name="index"/> out of a MatOfPoint2f as a
/// Unity Vector2, supporting both 64-bit and 32-bit float element depths.
/// Any other depth yields Vector2.zero.
/// </summary>
public static Vector2 ReadVector2(this MatOfPoint2f vectorArrayMat, int index)
{
    int depth = vectorArrayMat.depth();
    if (depth == CvType.CV_64F)
    {
        // Double-precision mat: read through the shared double[] scratch buffer.
        vectorArrayMat.get(index, 0, _temp2d);
        return new Vector2((float)_temp2d[0], (float)_temp2d[1]);
    }
    if (depth == CvType.CV_32F)
    {
        // Single-precision mat: read through the shared float[] scratch buffer.
        vectorArrayMat.get(index, 0, _temp2f);
        return new Vector2(_temp2f[0], _temp2f[1]);
    }
    return Vector2.zero;
}
/// <summary>
/// Maps a point given in screen coordinates onto the pixel coordinates of the
/// texture shown on a quad, writing the result into <paramref name="dstPoint"/>.
/// </summary>
/// <param name="screenPoint">Input point in screen space.</param>
/// <param name="dstPoint">Output point in texture space (mutated in place).</param>
/// <param name="textureQuad">Quad GameObject displaying the texture.</param>
/// <param name="textureWidth">Texture width in pixels; negative = auto-detect.</param>
/// <param name="textureHeight">Texture height in pixels; negative = auto-detect.</param>
/// <param name="camera">Camera used for projection; null = Camera.main.</param>
private void ConvertScreenPointToTexturePoint(Point screenPoint, Point dstPoint, GameObject textureQuad, int textureWidth = -1, int textureHeight = -1, Camera camera = null)
{
    // Auto-detect the texture size from the quad's material when not supplied,
    // falling back to the quad's local scale if there is no texture.
    if (textureWidth < 0 || textureHeight < 0)
    {
        Renderer rend = textureQuad.GetComponent<Renderer>();
        bool hasTexture = rend != null && rend.material != null && rend.material.mainTexture != null;
        if (hasTexture)
        {
            textureWidth = rend.material.mainTexture.width;
            textureHeight = rend.material.mainTexture.height;
        }
        else
        {
            textureWidth = (int)textureQuad.transform.localScale.x;
            textureHeight = (int)textureQuad.transform.localScale.y;
        }
    }

    if (camera == null)
    {
        camera = Camera.main;
    }

    Vector3 pos = textureQuad.transform.localPosition;
    Vector3 scale = textureQuad.transform.localScale;

    // Project the quad's four corners into screen space.
    Vector2 topLeft = camera.WorldToScreenPoint(new Vector3(pos.x - scale.x / 2, pos.y + scale.y / 2, pos.z));
    Vector2 topRight = camera.WorldToScreenPoint(new Vector3(pos.x + scale.x / 2, pos.y + scale.y / 2, pos.z));
    Vector2 bottomRight = camera.WorldToScreenPoint(new Vector3(pos.x + scale.x / 2, pos.y - scale.y / 2, pos.z));
    Vector2 bottomLeft = camera.WorldToScreenPoint(new Vector3(pos.x - scale.x / 2, pos.y - scale.y / 2, pos.z));

    // Build the screen-rect -> quad-rect perspective transform, push the input
    // point through it, then rescale from quad units to texture pixels.
    using (Mat srcRectMat = new Mat(4, 1, CvType.CV_32FC2))
    using (Mat dstRectMat = new Mat(4, 1, CvType.CV_32FC2))
    {
        srcRectMat.put(0, 0, topLeft.x, topLeft.y, topRight.x, topRight.y, bottomRight.x, bottomRight.y, bottomLeft.x, bottomLeft.y);
        dstRectMat.put(0, 0, 0, 0, scale.x, 0, scale.x, scale.y, 0, scale.y);

        using (Mat perspectiveTransform = Imgproc.getPerspectiveTransform(srcRectMat, dstRectMat))
        using (MatOfPoint2f srcPointMat = new MatOfPoint2f(screenPoint))
        using (MatOfPoint2f dstPointMat = new MatOfPoint2f())
        {
            Core.perspectiveTransform(srcPointMat, dstPointMat, perspectiveTransform);
            dstPoint.x = dstPointMat.get(0, 0)[0] * textureWidth / scale.x;
            dstPoint.y = dstPointMat.get(0, 0)[1] * textureHeight / scale.y;
        }
    }
}
// Builds matched lists of checkerboard corner positions: real-world 2D/3D
// coordinates (in meters, derived from the corner spacing) and the detected
// image pixel coordinates. pin1idx selects which corner of the 7x7 grid is
// the origin, which fixes the board's orientation; any other pin1idx value
// leaves all world coordinates at (0, 0).
void getChessBoardWorldPositions(MatOfPoint2f positions, int pin1idx, float distBetweenCorners, ref Point[] realWorldPointArray, ref Point3[] realWorldPointArray3, ref Point[] imagePointArray)
{
    int count = positions.rows();
    realWorldPointArray = new Point[count];
    realWorldPointArray3 = new Point3[count];
    imagePointArray = new Point[count];

    for (int i = 0; i < count; i++)
    {
        // Grid coordinates (in corner units, integer division intended)
        // before scaling; the layout depends on which extreme corner of the
        // 7x7 grid is "pin 1".
        double xp = 0.0;
        double zp = 0.0;
        switch (pin1idx)
        {
            case 0:
                xp = i % 7;
                zp = -(i / 7);
                break;
            case 6:
                xp = i / 7;
                zp = (i % 7) - 6;
                break;
            case 42:
                xp = -(i / 7) + 6;
                zp = -(i % 7);
                break;
            case 48:
                xp = -(i % 7) + 6;
                zp = (i / 7) - 6;
                break;
        }

        // Scale from corner units to meters.
        xp *= distBetweenCorners;
        zp *= distBetweenCorners;

        realWorldPointArray[i] = new Point(xp, zp);
        realWorldPointArray3[i] = new Point3(xp, zp, 0.0);
        double[] px = positions.get(i, 0);
        imagePointArray[i] = new Point(px[0], px[1]);
    }
}
// Identifies "pin 1": among the four extreme corners of the 7x7 chessboard
// grid (indices 0, 6, 42, 48), returns the index of the one nearest to the
// orange blob that lies closest to the board. That blob's position is
// reported through closestBlob.
int getPin1(MatOfPoint2f positions, Vector2[] blobs, ref Point closestBlob)
{
    // Pass 1: find the blob closest to ANY detected chessboard corner.
    Vector2 corner0 = new Vector2((float)positions.get(0, 0)[0], (float)positions.get(0, 0)[1]);
    int closest = 0;
    float closestDist = (blobs[0] - corner0).magnitude;
    for (int i = 0; i < blobs.Length; i++)
    {
        for (int ch = 0; ch < positions.rows(); ch++)
        {
            Vector2 corner = new Vector2((float)positions.get(ch, 0)[0], (float)positions.get(ch, 0)[1]);
            float dist = (blobs[i] - corner).magnitude;
            if (dist < closestDist)
            {
                closest = i;
                closestDist = dist;
            }
        }
    }
    closestBlob = new Point(blobs[closest].x, blobs[closest].y);

    // Pass 2: among the four extreme grid corners, pick the one nearest to
    // that blob — this is pin 1.
    int pin1idx = 0;
    float pin1dist = (blobs[closest] - corner0).magnitude;
    for (int ch = 0; ch < positions.rows(); ch++)
    {
        if (ch != 0 && ch != 6 && ch != 42 && ch != 48)
        {
            continue;
        }
        Vector2 corner = new Vector2((float)positions.get(ch, 0)[0], (float)positions.get(ch, 0)[1]);
        float newDist = (blobs[closest] - corner).magnitude;
        if (newDist < pin1dist)
        {
            pin1idx = ch;
            pin1dist = newDist;
        }
    }
    //Debug.Log("pin 1 idx: " + pin1idx);
    return pin1idx;
}
/// <summary>
/// Per-frame AR loop: copies the Vuforia camera feed into an OpenCV Mat,
/// projects the four world-space corner objects into image coordinates,
/// draws debug markers, computes the homography that unwarps the quad the
/// corners span, and displays/applies the unwarped texture.
/// </summary>
void Update()
{
    // Access camera image provided by Vuforia
    Image camImg = CameraDevice.Instance.GetCameraImage(Image.PIXEL_FORMAT.RGBA8888);
    if (camImg == null)
    {
        return;
    }

    if (camImageMat == null)
    {
        // First time -> instantiate camera image specific data.
        // Note: rows = height, cols = width.
        camImageMat = new Mat(camImg.Height, camImg.Width, CvType.CV_8UC4);
    }
    camImageMat.put(0, 0, camImg.Pixels);

    // Match the virtual camera's FOV to the real intrinsics (uses fy only).
    cam.fieldOfView = 2 * Mathf.Atan(camImg.Height * 0.5f / fy) * Mathf.Rad2Deg;

    Vector3 worldPnt1 = corner1.transform.position;
    Vector3 worldPnt2 = corner2.transform.position;
    Vector3 worldPnt3 = corner3.transform.position;
    Vector3 worldPnt4 = corner4.transform.position;

    // See lecture slides: image = A * [R|t] * world.
    Matrix4x4 Rt = cam.transform.worldToLocalMatrix;
    Matrix4x4 A = Matrix4x4.identity;
    A.m00 = fx;
    A.m11 = fy;
    A.m02 = cx;
    A.m12 = cy;
    Matrix4x4 worldToImage = A * Rt;

    Vector3 hUV1 = worldToImage.MultiplyPoint3x4(worldPnt1);
    Vector3 hUV2 = worldToImage.MultiplyPoint3x4(worldPnt2);
    Vector3 hUV3 = worldToImage.MultiplyPoint3x4(worldPnt3);
    Vector3 hUV4 = worldToImage.MultiplyPoint3x4(worldPnt4);

    // hUV are 2D homogeneous image coordinates; normalize by dividing by z.
    Vector2 uv1 = new Vector2(hUV1.x, hUV1.y) / hUV1.z;
    Vector2 uv2 = new Vector2(hUV2.x, hUV2.y) / hUV2.z;
    Vector2 uv3 = new Vector2(hUV3.x, hUV3.y) / hUV3.z;
    Vector2 uv4 = new Vector2(hUV4.x, hUV4.y) / hUV4.z;

    // Flip v: OpenCV's origin is top-left, Unity's is bottom-left.
    // (imagePoints must have been alloc'd before putting values into it.)
    imagePoints.put(0, 0, uv1.x, camImg.Height - uv1.y);
    imagePoints.put(1, 0, uv2.x, camImg.Height - uv2.y);
    imagePoints.put(2, 0, uv3.x, camImg.Height - uv3.y);
    imagePoints.put(3, 0, uv4.x, camImg.Height - uv4.y);

    // Debug draw points
    Point imgPnt1 = new Point(imagePoints.get(0, 0));
    Point imgPnt2 = new Point(imagePoints.get(1, 0));
    Point imgPnt3 = new Point(imagePoints.get(2, 0));
    Point imgPnt4 = new Point(imagePoints.get(3, 0));
    Imgproc.circle(camImageMat, imgPnt1, 5, new Scalar(255, 0, 0, 255));
    Imgproc.circle(camImageMat, imgPnt2, 5, new Scalar(0, 255, 0, 255));
    Imgproc.circle(camImageMat, imgPnt3, 5, new Scalar(0, 0, 255, 255));
    Imgproc.circle(camImageMat, imgPnt4, 5, new Scalar(255, 255, 0, 255));

    Scalar lineCl = new Scalar(200, 120, 0, 160);
    Imgproc.line(camImageMat, imgPnt1, imgPnt2, lineCl);
    Imgproc.line(camImageMat, imgPnt2, imgPnt3, lineCl);
    Imgproc.line(camImageMat, imgPnt3, imgPnt4, lineCl);
    Imgproc.line(camImageMat, imgPnt4, imgPnt1, lineCl);

    // Destination rectangle for the unwarp.
    // BUG FIX: the original passed destPoints as warpPerspective's OUTPUT
    // image, clobbering the 4x1 point matrix with a full camera frame, and
    // allocated new native Mats every frame without releasing them. Use a
    // dedicated output Mat and dispose the short-lived temporaries.
    MatOfPoint2f destPoints = new MatOfPoint2f();
    destPoints.alloc(4);
    destPoints.put(0, 0, width, 0);
    destPoints.put(1, 0, width, height);
    destPoints.put(2, 0, 0, height);
    destPoints.put(3, 0, 0, 0);

    Mat homography = Calib3d.findHomography(imagePoints, destPoints); // Finding the image
    Mat unwarped = new Mat();
    Imgproc.warpPerspective(camImageMat, unwarped, homography, new Size(camImageMat.width(), camImageMat.height()));
    destPoints.Dispose();
    homography.Dispose();

    unwarpedTexture = unwarpedTextureClean;
    MatDisplay.MatToTexture(unwarped, ref unwarpedTexture); // Take output and transform into texture

    if (Input.GetKey("space"))
    {
        fish.GetComponent<Renderer>().material.mainTexture = unwarpedTexture;
    }
    else
    {
        fish.GetComponent<Renderer>().material.mainTexture = tex;
    }

    MatDisplay.DisplayMat(unwarped, MatDisplaySettings.BOTTOM_LEFT);
    MatDisplay.DisplayMat(camImageMat, MatDisplaySettings.FULL_BACKGROUND);
}
// Runs extrinsic calibration for every connected Azure Kinect camera:
// detects a 7x7 chessboard in each color image, uses an orange blob to fix
// the board's orientation ("pin 1"), solves PnP for the camera pose, spawns
// debug marker cubes at the recovered color/depth camera poses, and pushes
// the annotated images to the calibration UI.
void handleCalibration()
{
    for (int i = 0; i < AK_receiver.GetComponent<akplay>().camInfoList.Count; i++)
    {
        // create color mat: copy the raw RGBA texture bytes into an OpenCV Mat
        // via a pinned GCHandle (freed immediately after the native copy).
        byte[] colorBytes = ((Texture2D)(AK_receiver.GetComponent<akplay>().camInfoList[i].colorCube.GetComponent<Renderer>().material.mainTexture)).GetRawTextureData();
        GCHandle ch = GCHandle.Alloc(colorBytes, GCHandleType.Pinned);
        Mat colorMat = new Mat(AK_receiver.GetComponent<akplay>().camInfoList[i].color_height, AK_receiver.GetComponent<akplay>().camInfoList[i].color_width, CvType.CV_8UC4);
        Utils.copyToMat(ch.AddrOfPinnedObject(), colorMat);
        ch.Free();

        //OpenCVForUnity.CoreModule.Core.flip(colorMat, colorMat, 0);

        // detect a chessboard in the image, and refine the points, and save the pixel positions:
        MatOfPoint2f positions = new MatOfPoint2f();
        int resizer = 4;
        resizer = 1; // no resize! (resizer left in place so downscaled detection can be re-enabled)
        Mat colorMatSmall = new Mat(); //~27 ms each
        Imgproc.resize(colorMat, colorMatSmall, new Size(colorMat.cols() / resizer, colorMat.rows() / resizer));
        bool success = Calib3d.findChessboardCorners(colorMatSmall, new Size(7, 7), positions);
        // Scale detected corners back up to full-resolution pixel coordinates
        // (a no-op while resizer == 1).
        for (int ss = 0; ss < positions.rows(); ss++)
        {
            double[] data = positions.get(ss, 0);
            data[0] = data[0] * resizer;
            data[1] = data[1] * resizer;
            positions.put(ss, 0, data);
        }

        // subpixel, drawing chessboard, and getting orange blobs takes 14ms
        // NOTE(review): tc is default-constructed (zeroed type/maxCount/epsilon) —
        // confirm cornerSubPix actually refines with these termination settings.
        TermCriteria tc = new TermCriteria();
        Imgproc.cornerSubPix(colorMat, positions, new Size(5, 5), new Size(-1, -1), tc);
        Mat chessboardResult = new Mat();
        colorMat.copyTo(chessboardResult);
        Calib3d.drawChessboardCorners(chessboardResult, new Size(7, 7), positions, success);

        // Find the orange blobs:
        Mat orangeMask = new Mat();
        Vector2[] blobs = getOrangeBlobs(ref colorMat, ref orangeMask);

        // find blob closest to chessboard
        if (success && (blobs.Length > 0))
        {
            Debug.Log("found a chessboard and blobs for camera: " + i);

            // time to get pin1 and chessboard positions: 27ms
            // find pin1 (the extreme grid corner nearest the orange blob):
            Point closestBlob = new Point();
            int pin1idx = getPin1(positions, blobs, ref closestBlob);
            Imgproc.circle(chessboardResult, new Point(positions.get(pin1idx, 0)[0], positions.get(pin1idx, 0)[1]), 10, new Scalar(255, 0, 0), -1);
            Imgproc.circle(chessboardResult, closestBlob, 10, new Scalar(255, 255, 0), -1);

            // get world positions of chessboard (orientation fixed by pin1idx)
            Point[] realWorldPointArray = new Point[positions.rows()];
            Point3[] realWorldPointArray3 = new Point3[positions.rows()];
            Point[] imagePointArray = new Point[positions.rows()];
            //getChessBoardWorldPositions(positions, pin1idx, 0.0498f, ref realWorldPointArray, ref realWorldPointArray3, ref imagePointArray); //green and white checkerboard.
            getChessBoardWorldPositions(positions, pin1idx, 0.07522f, ref realWorldPointArray, ref realWorldPointArray3, ref imagePointArray); //black and white checkerboard.

            // Annotate the four extreme corners with their real-world coords;
            // the int cast truncates each component to 3 decimals.
            string text = "";
            float decimals = 1000.0f;
            int text_red = 255;
            int text_green = 0;
            int text_blue = 0;
            text = ((int)(realWorldPointArray3[0].x * decimals)) / decimals + "," + ((int)(realWorldPointArray3[0].y * decimals)) / decimals + "," + ((int)(realWorldPointArray3[0].z * decimals)) / decimals;
            //text = sprintf("%f,%f,%f", realWorldPointArray3[0].x, realWorldPointArray3[0].y, realWorldPointArray3[0].z);
            Imgproc.putText(chessboardResult, text, new Point(positions.get(0, 0)[0], positions.get(0, 0)[1]), 0, .6, new Scalar(text_red, text_green, text_blue));
            text = ((int)(realWorldPointArray3[6].x * decimals)) / decimals + "," + ((int)(realWorldPointArray3[6].y * decimals)) / decimals + "," + ((int)(realWorldPointArray3[6].z * decimals)) / decimals;
            //text = sprintf("%f,%f,%f", realWorldPointArray3[0].x, realWorldPointArray3[0].y, realWorldPointArray3[0].z);
            Imgproc.putText(chessboardResult, text, new Point(positions.get(6, 0)[0], positions.get(6, 0)[1]), 0, .6, new Scalar(text_red, text_green, text_blue));
            text = ((int)(realWorldPointArray3[42].x * decimals)) / decimals + "," + ((int)(realWorldPointArray3[42].y * decimals)) / decimals + "," + ((int)(realWorldPointArray3[42].z * decimals)) / decimals;
            //text = sprintf("%f,%f,%f", realWorldPointArray3[0].x, realWorldPointArray3[0].y, realWorldPointArray3[0].z);
            Imgproc.putText(chessboardResult, text, new Point(positions.get(42, 0)[0], positions.get(42, 0)[1]), 0, .6, new Scalar(text_red, text_green, text_blue));
            text = ((int)(realWorldPointArray3[48].x * decimals)) / decimals + "," + ((int)(realWorldPointArray3[48].y * decimals)) / decimals + "," + ((int)(realWorldPointArray3[48].z * decimals)) / decimals;
            //text = sprintf("%2.2f,%2.2f,%2.2f", realWorldPointArray3[48].x, realWorldPointArray3[48].y, realWorldPointArray3[48].z);
            Imgproc.putText(chessboardResult, text, new Point(positions.get(48, 0)[0], positions.get(48, 0)[1]), 0, .6, new Scalar(text_red, text_green, text_blue));

            // Intrinsics from the Kinect SDK.
            Mat cameraMatrix = Mat.eye(3, 3, CvType.CV_64F);
            cameraMatrix.put(0, 0, AK_receiver.GetComponent<akplay>().camInfoList[i].color_fx);
            cameraMatrix.put(1, 1, AK_receiver.GetComponent<akplay>().camInfoList[i].color_fy);
            cameraMatrix.put(0, 2, AK_receiver.GetComponent<akplay>().camInfoList[i].color_cx);
            cameraMatrix.put(1, 2, AK_receiver.GetComponent<akplay>().camInfoList[i].color_cy);

            // Distortion coefficients in OpenCV order: k1, k2, p1, p2, k3, k4, k5, k6.
            double[] distortion = new double[8];
            distortion[0] = AK_receiver.GetComponent<akplay>().camInfoList[i].color_k1;
            distortion[1] = AK_receiver.GetComponent<akplay>().camInfoList[i].color_k2;
            distortion[2] = AK_receiver.GetComponent<akplay>().camInfoList[i].color_p1;
            distortion[3] = AK_receiver.GetComponent<akplay>().camInfoList[i].color_p2;
            distortion[4] = AK_receiver.GetComponent<akplay>().camInfoList[i].color_k3;
            distortion[5] = AK_receiver.GetComponent<akplay>().camInfoList[i].color_k4;
            distortion[6] = AK_receiver.GetComponent<akplay>().camInfoList[i].color_k5;
            distortion[7] = AK_receiver.GetComponent<akplay>().camInfoList[i].color_k6;

            /*
             * distortion[0] = 0.0;
             * distortion[1] = 0.0;
             * distortion[2] = 0.0;
             * distortion[3] = 0.0;
             * distortion[4] = 0.0;
             * distortion[5] = 0.0;
             * distortion[6] = 0.0;
             * distortion[7] = 0.0;
             */

            //~1 ms to solve for pnp
            Mat rvec = new Mat();
            Mat tvec = new Mat();
            bool solvepnpSucces = Calib3d.solvePnP(new MatOfPoint3f(realWorldPointArray3), new MatOfPoint2f(imagePointArray), cameraMatrix, new MatOfDouble(distortion), rvec, tvec);
            Mat R = new Mat();
            Calib3d.Rodrigues(rvec, R);

            // calculate unity vectors, and camera transforms
            // Camera center in world coords: C = -R^T * t.
            Mat camCenter = -R.t() * tvec;
            // Unit offsets along the camera's forward (+z) and up (-y in image
            // coords) axes, mapped back to world space.
            Mat forwardOffset = new Mat(3, 1, tvec.type());
            forwardOffset.put(0, 0, 0);
            forwardOffset.put(1, 0, 0);
            forwardOffset.put(2, 0, 1);
            Mat upOffset = new Mat(3, 1, tvec.type());
            upOffset.put(0, 0, 0);
            upOffset.put(1, 0, -1);
            upOffset.put(2, 0, 0);

            Mat forwardVectorCV = R.t() * (forwardOffset - tvec);
            forwardVectorCV = forwardVectorCV - camCenter;
            Mat upVectorCV = R.t() * (upOffset - tvec);
            upVectorCV = upVectorCV - camCenter;

            Vector3 forwardVectorUnity = new Vector3((float)forwardVectorCV.get(0, 0)[0], (float)forwardVectorCV.get(2, 0)[0], (float)forwardVectorCV.get(1, 0)[0]); //need to flip y and z due to unity coordinate system
            Vector3 upVectorUnity = new Vector3((float)upVectorCV.get(0, 0)[0], (float)upVectorCV.get(2, 0)[0], (float)upVectorCV.get(1, 0)[0]); //need to flip y and z due to unity coordinate system
            Vector3 camCenterUnity = new Vector3((float)camCenter.get(0, 0)[0], (float)camCenter.get(2, 0)[0], (float)camCenter.get(1, 0)[0]);
            Quaternion rotationUnity = Quaternion.LookRotation(forwardVectorUnity, upVectorUnity);

            // Blue cube: recovered pose of the COLOR camera.
            GameObject colorMarker = GameObject.CreatePrimitive(PrimitiveType.Cube);
            //colorMarker.transform.localScale = new Vector3(0.1f, 0.1f, 0.2f);
            //colorMarker.transform.parent = AK_receiver.transform;
            colorMarker.layer = LayerMask.NameToLayer("Debug");
            colorMarker.transform.position = camCenterUnity;
            colorMarker.transform.rotation = Quaternion.LookRotation(forwardVectorUnity, upVectorUnity);
            colorMarker.GetComponent<Renderer>().material.color = Color.blue;

            Vector3 forwardDepth = AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.MultiplyPoint(forwardVectorUnity);
            Vector3 upDepth = AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.MultiplyPoint(upVectorUnity);
            Vector3 camCenterDepth = AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.MultiplyPoint(camCenterUnity);
            Quaternion rotationDepth = Quaternion.LookRotation(forwardDepth, upDepth);

            // Red cube: DEPTH camera pose, derived by temporarily parenting to
            // the color marker and applying the inverse color->depth extrinsics.
            GameObject depthMarker = GameObject.CreatePrimitive(PrimitiveType.Cube);
            depthMarker.layer = LayerMask.NameToLayer("Debug");
            depthMarker.transform.parent = colorMarker.transform;
            //depthMarker.transform.localScale = AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.lossyScale;
            depthMarker.transform.localRotation = AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.inverse.rotation;
            Vector3 matrixPosition = new Vector3(AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.inverse.GetColumn(3).x, AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.inverse.GetColumn(3).y, AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.inverse.GetColumn(3).z);

            /*
             * depthMarker.transform.localRotation = AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.rotation;
             *
             * Vector3 matrixPosition = new Vector3(AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.GetColumn(3).x,
             * AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.GetColumn(3).y,
             * AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.GetColumn(3).z);
             */

            depthMarker.transform.localPosition = -matrixPosition;
            depthMarker.transform.parent = null;
            colorMarker.transform.localScale = new Vector3(0.1f, 0.1f, 0.2f);
            depthMarker.transform.localScale = new Vector3(0.1f, 0.1f, 0.2f);
            //depthMarker.transform.parent = AK_receiver.transform;
            //depthMarker.transform.position = camCenterDepth;
            //depthMarker.transform.rotation = Quaternion.LookRotation(forwardDepth-camCenterDepth, upDepth-camCenterDepth);
            depthMarker.GetComponent<Renderer>().material.color = Color.red;

            // Move the per-camera visualization to the recovered depth pose.
            AK_receiver.GetComponent<akplay>().camInfoList[i].visualization.transform.position = depthMarker.transform.position; //need to flip y and z due to unity coordinate system
            AK_receiver.GetComponent<akplay>().camInfoList[i].visualization.transform.rotation = depthMarker.transform.rotation;
        }

        // draw chessboard result to calibration ui:
        Texture2D colorTexture = new Texture2D(chessboardResult.cols(), chessboardResult.rows(), TextureFormat.BGRA32, false);
        colorTexture.LoadRawTextureData((IntPtr)chessboardResult.dataAddr(), (int)chessboardResult.total() * (int)chessboardResult.elemSize());
        colorTexture.Apply();
        checkerboard_display_list[i].GetComponent<Renderer>().material.mainTexture = colorTexture;

        // draw threshold to calibration ui:
        Texture2D orangeTexture = new Texture2D(orangeMask.cols(), orangeMask.rows(), TextureFormat.R8, false);
        orangeTexture.LoadRawTextureData((IntPtr)orangeMask.dataAddr(), (int)orangeMask.total() * (int)orangeMask.elemSize());
        orangeTexture.Apply();
        threshold_display_list[i].GetComponent<Renderer>().material.mainTexture = orangeTexture;
    }
}
/// <summary>
/// Returns the point stored at <paramref name="index"/> in a MatOfPoint2f as
/// a Unity Vector2 (reads through the shared tmpPnt scratch buffer).
/// </summary>
public static Vector2 GetPoint2f(MatOfPoint2f matOfPoint2f, int index)
{
    matOfPoint2f.get(index, 0, tmpPnt);
    Vector2 result = new Vector2(tmpPnt[0], tmpPnt[1]);
    return result;
}
/// <summary>
/// Per-frame AR loop (see lecture slides): projects the four corner objects
/// into Vuforia camera-image coordinates, draws debug markers, computes the
/// homography to an axis-aligned 442x442 square, and warps the dog texture
/// back into the camera view.
/// </summary>
void Update()
{
    // Camera image from Vuforia
    Image camImg = CameraDevice.Instance.GetCameraImage(PIXEL_FORMAT.RGBA8888);
    if (camImg == null || camImg.Height <= 0)
    {
        return;
    }

    if (camImageMat == null)
    {
        // Vuforia seems to enforce a resolution of width=640px for any camera
        Debug.Log("rows: " + camImg.Height + ", cols: " + camImg.Width);
        camImageMat = new Mat(camImg.Height, camImg.Width, CvType.CV_8UC4);
    }

    // Put Vuforia camera feed pixels into OpenCV display matrix
    camImageMat.put(0, 0, camImg.Pixels);

    //Imgproc.circle(camImageMat, new Point(300, 200), 20, new Scalar(255, 0, 0, 128));

    //---- <THIS IS WHERE THE CORNER PROJECTION BEGINS> ----

    // BUG FIX: transform.position is already a world-space point; the
    // original additionally multiplied it by localToWorldMatrix, applying the
    // local->world transform twice and projecting the wrong points. (The
    // other Update() in this project does this correctly.)
    Vector3 worldPnt1 = corner1.transform.position;
    Vector3 worldPnt2 = corner2.transform.position;
    Vector3 worldPnt3 = corner3.transform.position;
    Vector3 worldPnt4 = corner4.transform.position;

    // Matrix that goes from world to the camera coordinate system
    Matrix4x4 Rt = cam.transform.worldToLocalMatrix;

    // Camera intrinsics
    Matrix4x4 A = Matrix4x4.identity;
    A.m00 = fx;
    A.m11 = fy;
    A.m02 = cx;
    A.m12 = cy;

    // see cheat sheet: image = A * [R|t] * world
    Matrix4x4 worldToImage = A * Rt;

    Vector3 hUV1 = worldToImage.MultiplyPoint3x4(worldPnt1);
    Vector3 hUV2 = worldToImage.MultiplyPoint3x4(worldPnt2);
    Vector3 hUV3 = worldToImage.MultiplyPoint3x4(worldPnt3);
    Vector3 hUV4 = worldToImage.MultiplyPoint3x4(worldPnt4);

    // Homogeneous image coordinates -> pixels: divide by z.
    Vector2 uv1 = new Vector2(hUV1.x, hUV1.y) / hUV1.z;
    Vector2 uv2 = new Vector2(hUV2.x, hUV2.y) / hUV2.z;
    Vector2 uv3 = new Vector2(hUV3.x, hUV3.y) / hUV3.z;
    Vector2 uv4 = new Vector2(hUV4.x, hUV4.y) / hUV4.z;

    // Flip the v-coordinate: OpenCV's (0,0) is the top-left corner, Unity's
    // (Vuforia's) is the bottom-left.
    float maxV = camImg.Height - 1; // The -1 is because pixel coordinates are 0-indexed
    imagePoints.put(0, 0, uv1.x, maxV - uv1.y);
    imagePoints.put(1, 0, uv2.x, maxV - uv2.y);
    imagePoints.put(2, 0, uv3.x, maxV - uv3.y);
    imagePoints.put(3, 0, uv4.x, maxV - uv4.y);

    Point imgPnt1 = new Point(imagePoints.get(0, 0));
    Point imgPnt2 = new Point(imagePoints.get(1, 0));
    Point imgPnt3 = new Point(imagePoints.get(2, 0));
    Point imgPnt4 = new Point(imagePoints.get(3, 0));

    // For debug: show whether imgPnt_i found the right position in image coordinates.
    Imgproc.circle(camImageMat, imgPnt1, 10, new Scalar(255, 0, 0, 200), 5);
    Imgproc.circle(camImageMat, imgPnt2, 20, new Scalar(255, 255, 0, 255), 5);
    Imgproc.circle(camImageMat, imgPnt3, 30, new Scalar(0, 255, 0, 255), 5);
    Imgproc.circle(camImageMat, imgPnt4, 40, new Scalar(0, 0, 255, 255), 4);

    // Destination square for the unwarp (matches the 442x442 output), created
    // and released per frame to avoid leaking native memory.
    using (MatOfPoint2f unwarpPoints = new MatOfPoint2f())
    {
        unwarpPoints.alloc(4); // according to the resolution
        unwarpPoints.put(0, 0, 0, 0);
        unwarpPoints.put(1, 0, 0, 442);
        unwarpPoints.put(2, 0, 442, 442);
        unwarpPoints.put(3, 0, 442, 0);

        // Homography image->square; its inverse maps the square (texture)
        // back into the camera image.
        using (Mat H = Calib3d.findHomography(imagePoints, unwarpPoints))
        using (Mat Hinv = H.inv())
        using (Mat dst = new Mat(442, 442, CvType.CV_8UC4))
        {
            // PERF FIX: the original re-read the texture from disk every
            // frame; load it once and cache it in the texMat field.
            if (texMat == null)
            {
                texMat = MatDisplay.LoadRGBATexture("/models/dog_tex.png");
            }
            Imgproc.warpPerspective(texMat, dst, Hinv, new Size(442, 442));
            // MatDisplay.MatToTexture(dst, ref tex);
            //rd.material.mainTexture = tex;
        }
    }

    //---- </THIS IS WHERE THE CORNER PROJECTION ENDS> ----

    // Display the Mat that includes video feed and debug points.
    // Do not forget to disable Vuforia's video background and change your aspect ratio to 4:3!
    MatDisplay.DisplayMat(camImageMat, MatDisplaySettings.FULL_BACKGROUND);

    //---- MATCH INTRINSICS OF REAL CAMERA AND PROJECTION MATRIX OF VIRTUAL CAMERA ----
    // See lecture slides for why this formula works.
    cam.fieldOfView = 2 * Mathf.Atan(camImg.Height * 0.5f / fy) * Mathf.Rad2Deg;
}
/// <summary>
/// Computes the 3x3 homography H mapping the four imagePoints (x, y) to the
/// four destPoints (u, v) by filling the 8x8 linear system A*h = b (two rows
/// per correspondence) and solving for the first eight entries of H, with
/// h22 fixed to 1. Uses the pre-allocated matrixA / matrixH fields.
/// </summary>
private Mat findHomographyCustom(MatOfPoint2f imagePoints, MatOfPoint2f destPoints)
{
    // Fill the design matrix: for correspondence p, row 2p encodes the u
    // equation [x y 1 0 0 0 -u*x -u*y] and row 2p+1 the v equation
    // [0 0 0 x y 1 -v*x -v*y]. Only the nonzero cells are written.
    for (int p = 0; p < 4; p++)
    {
        double u = destPoints.get(p, 0)[0];
        double v = destPoints.get(p, 0)[1];
        double x = imagePoints.get(p, 0)[0];
        double y = imagePoints.get(p, 0)[1];

        int r = 2 * p;
        matrixA.put(r, 0, x);
        matrixA.put(r, 1, y);
        matrixA.put(r, 2, 1);
        matrixA.put(r, 6, -u * x);
        matrixA.put(r, 7, -u * y);

        matrixA.put(r + 1, 3, x);
        matrixA.put(r + 1, 4, y);
        matrixA.put(r + 1, 5, 1);
        matrixA.put(r + 1, 6, -v * x);
        matrixA.put(r + 1, 7, -v * y);
    }

    // Right-hand side: the destination coordinates, interleaved u, v.
    Mat matrixB = new Mat(8, 1, CvType.CV_64FC1);
    for (int p = 0; p < 4; p++)
    {
        matrixB.put(2 * p, 0, destPoints.get(p, 0)[0]);
        matrixB.put(2 * p + 1, 0, destPoints.get(p, 0)[1]);
    }

    // Solve A * h = b for the eight unknown homography entries.
    Core.solve(matrixA, matrixB, matrixH);

    // Repack the 8-vector into a 3x3 matrix row-by-row, fixing h22 = 1
    // (the normalization of the homography).
    Mat homography = new Mat(3, 3, CvType.CV_64FC1);
    for (int k = 0; k < 8; k++)
    {
        homography.put(k / 3, k % 3, matrixH.get(k, 0));
    }
    homography.put(2, 2, 1);
    return homography;
}
// Reorders the four points in fourPoints into (top-left, top-right,
// bottom-right, bottom-left) by an exhaustive hand-written case analysis:
// the outer branches pick the left-most/top-most candidate for the first
// slot, the inner branches order the remaining three.
//
// NOTE(review): every comparison is strict `<`, so exact ties on x or y can
// make all branches false and leave `points` with fewer than 4 entries —
// fromList would then produce a short matrix; confirm inputs can never tie.
// NOTE(review): the debug prints cast the double coordinates to (char),
// printing character codes rather than numbers — looks unintended.
private void sort(ref MatOfPoint2f fourPoints)
{
    // the argument needs to contain 4 points precisely
    // sort the bounding box to (topleft, topright, bottomright, bottomleft
    double[] p1, p2, p3, p4;
    List<Point> points = new List<Point>();
    p1 = new double[2] { fourPoints.get(0, 0)[0], fourPoints.get(0, 0)[1] };
    p2 = new double[2] { fourPoints.get(1, 0)[0], fourPoints.get(1, 0)[1] };
    p3 = new double[2] { fourPoints.get(2, 0)[0], fourPoints.get(2, 0)[1] };
    p4 = new double[2] { fourPoints.get(3, 0)[0], fourPoints.get(3, 0)[1] };

    // (char) casts print character codes, not coordinates — see note above.
    print("p1: " + (char)p1[0] + "," + (char)p1[1]);
    print("p2: " + (char)p2[0] + "," + (char)p2[1]);
    print("p3: " + (char)p3[0] + "," + (char)p3[1]);
    print("p4: " + (char)p4[0] + "," + (char)p4[1]);

    // Case 1: p1 is among the left-most points.
    if (p1[0] < p2[0] && p1[0] < p3[0] || p1[0] < p4[0] && p1[0] < p3[0] || p1[0] < p2[0] && p1[0] < p4[0])
    {
        // p1 is also among the top-most -> p1 is the top-left corner.
        if (p1[1] < p2[1] && p1[1] < p3[1] || p1[1] < p4[1] && p1[1] < p3[1] || p1[1] < p2[1] && p1[1] < p4[1])
        {
            points.Add(new Point(p1[0], p1[1]));
            // The top-most of the rest is top-right; the remaining two are
            // ordered right (bottom-right) before left (bottom-left).
            if (p2[1] < p3[1] && p2[1] < p4[1])
            {
                points.Add(new Point(p2[0], p2[1]));
                if (p3[0] < p4[0]) { points.Add(new Point(p4[0], p4[1])); points.Add(new Point(p3[0], p3[1])); }
                else { points.Add(new Point(p3[0], p3[1])); points.Add(new Point(p4[0], p4[1])); }
            }
            else if (p3[1] < p2[1] && p3[1] < p4[1])
            {
                points.Add(new Point(p3[0], p3[1]));
                if (p2[0] < p4[0]) { points.Add(new Point(p4[0], p4[1])); points.Add(new Point(p2[0], p2[1])); }
                else { points.Add(new Point(p2[0], p2[1])); points.Add(new Point(p4[0], p4[1])); }
            }
            else if (p4[1] < p2[1] && p4[1] < p3[1])
            {
                points.Add(new Point(p4[0], p4[1]));
                if (p2[0] < p3[0]) { points.Add(new Point(p3[0], p3[1])); points.Add(new Point(p2[0], p2[1])); }
                else { points.Add(new Point(p2[0], p2[1])); points.Add(new Point(p3[0], p3[1])); }
            }
        }
        // Otherwise the left-most of the rest leads and p1 comes last.
        else if (p2[0] < p3[0] && p2[0] < p4[0])
        {
            points.Add(new Point(p2[0], p2[1]));
            if (p3[1] < p4[1]) { points.Add(new Point(p3[0], p3[1])); points.Add(new Point(p4[0], p4[1])); }
            else { points.Add(new Point(p4[0], p4[1])); points.Add(new Point(p3[0], p3[1])); }
            points.Add(new Point(p1[0], p1[1]));
        }
        else if (p3[0] < p2[0] && p3[0] < p4[0])
        {
            points.Add(new Point(p3[0], p3[1]));
            if (p2[1] < p4[1]) { points.Add(new Point(p2[0], p2[1])); points.Add(new Point(p4[0], p4[1])); }
            else { points.Add(new Point(p4[0], p4[1])); points.Add(new Point(p2[0], p2[1])); }
            points.Add(new Point(p1[0], p1[1]));
        }
        else if (p4[0] < p3[0] && p4[0] < p2[0])
        {
            points.Add(new Point(p4[0], p4[1]));
            if (p3[1] < p2[1]) { points.Add(new Point(p3[0], p3[1])); points.Add(new Point(p2[0], p2[1])); }
            else { points.Add(new Point(p2[0], p2[1])); points.Add(new Point(p3[0], p3[1])); }
            points.Add(new Point(p1[0], p1[1]));
        }
    }
    // Case 2: same analysis with p2 as the left-most candidate.
    else if (p2[0] < p1[0] && p2[0] < p3[0] || p2[0] < p4[0] && p2[0] < p3[0] || p2[0] < p1[0] && p2[0] < p4[0])
    {
        if (p2[1] < p1[1] && p2[1] < p3[1] || p2[1] < p4[1] && p2[1] < p3[1] || p2[1] < p1[1] && p2[1] < p4[1])
        {
            points.Add(new Point(p2[0], p2[1]));
            if (p1[1] < p3[1] && p1[1] < p4[1])
            {
                points.Add(new Point(p1[0], p1[1]));
                if (p3[0] < p4[0]) { points.Add(new Point(p4[0], p4[1])); points.Add(new Point(p3[0], p3[1])); }
                else { points.Add(new Point(p3[0], p3[1])); points.Add(new Point(p4[0], p4[1])); }
            }
            else if (p3[1] < p1[1] && p3[1] < p4[1])
            {
                points.Add(new Point(p3[0], p3[1]));
                if (p1[0] < p4[0]) { points.Add(new Point(p4[0], p4[1])); points.Add(new Point(p1[0], p1[1])); }
                else { points.Add(new Point(p1[0], p1[1])); points.Add(new Point(p4[0], p4[1])); }
            }
            else if (p4[1] < p1[1] && p4[1] < p3[1])
            {
                points.Add(new Point(p4[0], p4[1]));
                if (p1[0] < p3[0]) { points.Add(new Point(p3[0], p3[1])); points.Add(new Point(p1[0], p1[1])); }
                else { points.Add(new Point(p1[0], p1[1])); points.Add(new Point(p3[0], p3[1])); }
            }
        }
        else if (p1[0] < p3[0] && p1[0] < p4[0])
        {
            points.Add(new Point(p1[0], p1[1]));
            if (p3[1] < p4[1]) { points.Add(new Point(p3[0], p3[1])); points.Add(new Point(p4[0], p4[1])); }
            else { points.Add(new Point(p4[0], p4[1])); points.Add(new Point(p3[0], p3[1])); }
            points.Add(new Point(p2[0], p2[1]));
        }
        else if (p3[0] < p1[0] && p3[0] < p4[0])
        {
            points.Add(new Point(p3[0], p3[1]));
            if (p1[1] < p4[1]) { points.Add(new Point(p1[0], p1[1])); points.Add(new Point(p4[0], p4[1])); }
            else { points.Add(new Point(p4[0], p4[1])); points.Add(new Point(p1[0], p1[1])); }
            points.Add(new Point(p2[0], p2[1]));
        }
        else if (p4[0] < p3[0] && p4[0] < p1[0])
        {
            points.Add(new Point(p4[0], p4[1]));
            if (p3[1] < p1[1]) { points.Add(new Point(p3[0], p3[1])); points.Add(new Point(p1[0], p1[1])); }
            else { points.Add(new Point(p1[0], p1[1])); points.Add(new Point(p3[0], p3[1])); }
            points.Add(new Point(p2[0], p2[1]));
        }
    }
    // Case 3: same analysis with p3 as the left-most candidate.
    else if (p3[0] < p2[0] && p3[0] < p1[0] || p3[0] < p4[0] && p3[0] < p1[0] || p3[0] < p2[0] && p3[0] < p4[0])
    {
        if (p3[1] < p2[1] && p3[1] < p1[1] || p3[1] < p4[1] && p3[1] < p1[1] || p3[1] < p2[1] && p3[1] < p4[1])
        {
            points.Add(new Point(p3[0], p3[1]));
            if (p2[1] < p1[1] && p2[1] < p4[1])
            {
                points.Add(new Point(p2[0], p2[1]));
                if (p1[0] < p4[0]) { points.Add(new Point(p4[0], p4[1])); points.Add(new Point(p1[0], p1[1])); }
                else { points.Add(new Point(p1[0], p1[1])); points.Add(new Point(p4[0], p4[1])); }
            }
            else if (p1[1] < p2[1] && p1[1] < p4[1])
            {
                points.Add(new Point(p1[0], p1[1]));
                if (p2[0] < p4[0]) { points.Add(new Point(p4[0], p4[1])); points.Add(new Point(p2[0], p2[1])); }
                else { points.Add(new Point(p2[0], p2[1])); points.Add(new Point(p4[0], p4[1])); }
            }
            else if (p4[1] < p2[1] && p4[1] < p1[1])
            {
                points.Add(new Point(p4[0], p4[1]));
                if (p2[0] < p1[0]) { points.Add(new Point(p1[0], p1[1])); points.Add(new Point(p2[0], p2[1])); }
                else { points.Add(new Point(p2[0], p2[1])); points.Add(new Point(p1[0], p1[1])); }
            }
        }
        else if (p2[0] < p1[0] && p2[0] < p4[0])
        {
            points.Add(new Point(p2[0], p2[1]));
            if (p1[1] < p4[1]) { points.Add(new Point(p1[0], p1[1])); points.Add(new Point(p4[0], p4[1])); }
            else { points.Add(new Point(p4[0], p4[1])); points.Add(new Point(p1[0], p1[1])); }
            points.Add(new Point(p3[0], p3[1]));
        }
        else if (p1[0] < p2[0] && p1[0] < p4[0])
        {
            points.Add(new Point(p1[0], p1[1]));
            if (p2[1] < p4[1]) { points.Add(new Point(p2[0], p2[1])); points.Add(new Point(p4[0], p4[1])); }
            else { points.Add(new Point(p4[0], p4[1])); points.Add(new Point(p2[0], p2[1])); }
            points.Add(new Point(p3[0], p3[1]));
        }
        else if (p4[0] < p1[0] && p4[0] < p2[0])
        {
            points.Add(new Point(p4[0], p4[1]));
            if (p1[1] < p2[1]) { points.Add(new Point(p1[0], p1[1])); points.Add(new Point(p2[0], p2[1])); }
            else { points.Add(new Point(p2[0], p2[1])); points.Add(new Point(p1[0], p1[1])); }
            points.Add(new Point(p3[0], p3[1]));
        }
    }
    // Case 4: same analysis with p4 as the left-most candidate.
    else if (p4[0] < p2[0] && p4[0] < p3[0] || p4[0] < p1[0] && p4[0] < p3[0] || p4[0] < p2[0] && p4[0] < p1[0])
    {
        if (p4[1] < p2[1] && p4[1] < p3[1] || p4[1] < p1[1] && p4[1] < p3[1] || p4[1] < p2[1] && p4[1] < p1[1])
        {
            points.Add(new Point(p4[0], p4[1]));
            if (p2[1] < p3[1] && p2[1] < p1[1])
            {
                points.Add(new Point(p2[0], p2[1]));
                if (p3[0] < p1[0]) { points.Add(new Point(p1[0], p1[1])); points.Add(new Point(p3[0], p3[1])); }
                else { points.Add(new Point(p3[0], p3[1])); points.Add(new Point(p1[0], p1[1])); }
            }
            else if (p3[1] < p2[1] && p3[1] < p1[1])
            {
                points.Add(new Point(p3[0], p3[1]));
                if (p2[0] < p1[0]) { points.Add(new Point(p1[0], p1[1])); points.Add(new Point(p2[0], p2[1])); }
                else { points.Add(new Point(p2[0], p2[1])); points.Add(new Point(p1[0], p1[1])); }
            }
            else if (p1[1] < p2[1] && p1[1] < p3[1])
            {
                points.Add(new Point(p1[0], p1[1]));
                if (p2[0] < p3[0]) { points.Add(new Point(p3[0], p3[1])); points.Add(new Point(p2[0], p2[1])); }
                else { points.Add(new Point(p2[0], p2[1])); points.Add(new Point(p3[0], p3[1])); }
            }
        }
        else if (p2[0] < p3[0] && p2[0] < p1[0])
        {
            points.Add(new Point(p2[0], p2[1]));
            if (p3[1] < p1[1]) { points.Add(new Point(p3[0], p3[1])); points.Add(new Point(p1[0], p1[1])); }
            else { points.Add(new Point(p1[0], p1[1])); points.Add(new Point(p3[0], p3[1])); }
            points.Add(new Point(p4[0], p4[1]));
        }
        else if (p3[0] < p2[0] && p3[0] < p1[0])
        {
            points.Add(new Point(p3[0], p3[1]));
            if (p2[1] < p1[1]) { points.Add(new Point(p2[0], p2[1])); points.Add(new Point(p1[0], p1[1])); }
            else { points.Add(new Point(p1[0], p1[1])); points.Add(new Point(p2[0], p2[1])); }
            points.Add(new Point(p4[0], p4[1]));
        }
        else if (p1[0] < p3[0] && p1[0] < p2[0])
        {
            points.Add(new Point(p1[0], p1[1]));
            if (p3[1] < p2[1]) { points.Add(new Point(p3[0], p3[1])); points.Add(new Point(p2[0], p2[1])); }
            else { points.Add(new Point(p2[0], p2[1])); points.Add(new Point(p3[0], p3[1])); }
            points.Add(new Point(p4[0], p4[1]));
        }
    }

    // MatOfPoint2f pts2 = new MatOfPoint2f();
    // NOTE(review): allocating a new MatOfPoint2f and immediately releasing
    // it is redundant — fromList alone rebuilds the matrix contents.
    fourPoints = new MatOfPoint2f();
    fourPoints.release();
    fourPoints.fromList(points);
}