// Coroutine that runs the full screen-location calibration sequence:
// background capture -> binary mask creation -> per-dot triangulation.
// Steps are paced with `standardDelay` waits so the cameras settle between stages.
IEnumerator CalibrateScreenLocation() {
    // Reset some stuff: clear debug-line persistence, hide both test patterns,
    // and reset per-device masks before capturing new backgrounds.
    HyperMegaStuff.HyperMegaLines.drawer.dontClear = false;
    monitorPattern.SetActive(false);
    headsetPattern.SetActive(false);
    for (int i = 0; i < calibrationDevices.Length; i++) {
        if (calibrationDevices[i].isConnected) {
            calibrationDevices[i].resetMasks();
        }
        calibrationDevices[i].undistortImage = false; // Stop undistorting the image because it's slow
    }
    whiteCircle.gameObject.SetActive(false);

    // 1) Take a background subtraction shot of a black screen.
    // Accumulate frames for ~10x standardDelay so the background is stable.
    yield return (new WaitForSeconds(standardDelay));
    float startTime = Time.unscaledTime;
    while (Time.unscaledTime < startTime + standardDelay * 10f) {
        // Take the max of the current image against the current subtraction image
        updateSubtractionBackgrounds();
        yield return (null);
    }
    yield return (new WaitForSeconds(standardDelay));

    // 2) Unhide a white object so the screen area lights up against the background.
    monitorWhiteness.SetActive(true);
    yield return (new WaitForSeconds(standardDelay));

    // 3) Hit "Create Mask"
    // Create the binary masks from the subtracted images
    createBinaryMasks(monitorMaskThreshold);
    yield return (new WaitForSeconds(standardDelay));
    monitorWhiteness.SetActive(false);

    // 4) Start Moving the Circle Around: show the white circle at each
    // calibration-dot position and retry until at least one device
    // triangulates it (the while loop blocks until foundDots > 0).
    whiteCircle.SetActive(true);
    yield return (new WaitForSeconds(standardDelay / 2f));
    HyperMegaStuff.HyperMegaLines drawer = HyperMegaStuff.HyperMegaLines.drawer;
    drawer.dontClear = true; // keep the debug rays on screen while dots accumulate
    for (int i = 0; i < _realDots.Count; i++) {
        whiteCircle.transform.position = calibrationDotsParent.GetChild(i).position;
        int foundDots = 0;
        while (foundDots == 0) {
            yield return (new WaitForSeconds(standardDelay));
            foundDots = 0;
            for (int j = 0; j < calibrationDevices.Length; j++) {
                // triangulate() returns Vector3.zero on failure (see triangulate)
                Vector3 triangulatedDot = triangulate(j, drawer);
                if (triangulatedDot != Vector3.zero) {
                    calibrationDevices[j].triangulatedDots[i] = triangulatedDot;
                    foundDots++;
                }
            }
        }
    }
    whiteCircle.SetActive(false);
    drawer.dontClear = false;

    // Kabsch the Dots, move the screen!
    // NOTE(review): `solver` is constructed but its only use is the
    // commented-out SolveKabsch call below, so the screen is never actually
    // moved here — confirm whether this is intentionally disabled.
    KabschSolver solver = new KabschSolver();
    // Place the monitor based off of one of the webcams: pair each known
    // monitor-dot position with the triangulated position from device 0 and
    // device 1 (skipping dots that failed to triangulate on that device).
    List<Vector3> _triangulatedDots = new List<Vector3>(), _monitorDots = new List<Vector3>();
    for (int j = 0; j < _realDots.Count; j++) {
        if (calibrationDevices[0].triangulatedDots[j] != Vector3.zero) {
            _monitorDots.Add(calibrationDotsParent.GetChild(j).position);
            _triangulatedDots.Add(calibrationDevices[0].triangulatedDots[j]);
        }
        if (calibrationDevices[1].triangulatedDots[j] != Vector3.zero) {
            _monitorDots.Add(calibrationDotsParent.GetChild(j).position);
            _triangulatedDots.Add(calibrationDevices[1].triangulatedDots[j]);
        }
    }
    // NOTE(review): original comment here was Japanese "エラー発見" ("error found") —
    // the Kabsch transform below was disabled at that point, presumably because
    // it misbehaved; verify before re-enabling.
    // CalibrationMonitor.Transform(solver.SolveKabsch(_monitorDots, _triangulatedDots, 200));
    for (int j = 0; j < _realDots.Count; j++) {
        _realDots[j] = calibrationDotsParent.GetChild(j).position;
    }
    monitorPattern.SetActive(true);
    headsetPattern.SetActive(true);
}
// Attempts to locate the white calibration circle in both stereo images of
// device `j` and triangulate its 3D position from the two camera rays.
// Returns Vector3.zero when the blob is missing in either image or the two
// rays do not pass within 5 mm of each other.
// `drawer`, when non-null, receives debug rays for on-screen visualization.
Vector3 triangulate(int j, HyperMegaStuff.HyperMegaLines drawer = null) {
    Ray[] rays = new Ray[2];
    Mat workingImage = new Mat(calibrationDevices[j].webcam.leftImage.Height,
                               calibrationDevices[j].webcam.leftImage.Width,
                               calibrationDevices[j].webcam.leftImage.Type(), 0);
    for (int i = 0; i < 2; i++) { // i == 0: left image, i == 1: right image
        Mat curMat = i == 0 ? calibrationDevices[j].webcam.leftImage
                            : calibrationDevices[j].webcam.rightImage;
        if (calibrationDevices[j].subtractionImage[i] != null) {
            // Subtract the background from the curMat
            Cv2.Subtract(curMat, calibrationDevices[j].subtractionImage[i], workingImage);

            // Threshold the image to separate black and white
            Cv2.Threshold(workingImage, workingImage, blobThreshold, 255,
                          ThresholdTypes.BinaryInv); // TODO MAKE THRESHOLD TUNABLE

            // Detect Blobs using the Mask
            var settings = new SimpleBlobDetector.Params();
            settings.FilterByArea        = false;
            settings.FilterByColor       = false;
            settings.FilterByInertia     = true;
            settings.FilterByConvexity   = true;
            settings.FilterByCircularity = false;
            // FIX: `settings` was previously built and then discarded —
            // Create() was called with no arguments, so none of the filters
            // above took effect. Pass them in, and dispose the detector.
            using (SimpleBlobDetector detector = SimpleBlobDetector.Create(settings)) {
                KeyPoint[] blobs = detector.Detect(workingImage, calibrationDevices[j].maskImage[i]);
                Cv2.DrawKeypoints(workingImage, blobs, workingImage, 255);

                // Pick the largest blob and assume it's the white circle.
                int biggest = -1; float size = 0;
                for (int k = 0; k < blobs.Length; k++) {
                    if (blobs[k].Size > size) { biggest = k; size = blobs[k].Size; }
                }

                // FIX: guard on `biggest >= 0` rather than `blobs.Length > 0`;
                // if every detected blob had Size 0, `biggest` stayed -1 and
                // `blobs[biggest]` threw IndexOutOfRangeException.
                if (biggest >= 0) {
                    float[] pointArr = { blobs[biggest].Pt.X, blobs[biggest].Pt.Y };
                    // FIX: wrap the temporary Mats in using-blocks — they were
                    // never released before, leaking native memory every call.
                    using (Mat point = new Mat(1, 1, MatType.CV_32FC2, pointArr))
                    using (Mat undistortedPoint = new Mat(1, 1, MatType.CV_32FC2, 0)) {
                        Cv2.UndistortPoints(point, undistortedPoint,
                            calibrationDevices[j].calibration.cameras[i].cameraMatrixMat,
                            calibrationDevices[j].calibration.cameras[i].distCoeffsMat,
                            calibrationDevices[j].calibration.cameras[i].rectificationMatrixMat);
                        Point2f[] rectilinear = new Point2f[1];
                        undistortedPoint.GetArray(0, 0, rectilinear);
                        Transform camera = i == 0 ? calibrationDevices[j].LeftCamera
                                                  : calibrationDevices[j].RightCamera;
                        // X is negated: the rectilinear image X axis appears
                        // mirrored relative to the camera transform — TODO confirm.
                        rays[i] = new Ray(camera.position, camera.TransformDirection(
                            new Vector3(-rectilinear[0].X, rectilinear[0].Y, 1f)));
                        if (drawer != null) {
                            drawer.color = ((j == 0) != (i == 0)) ? Color.cyan : Color.red;
                            drawer.DrawRay(rays[i].origin, rays[i].direction);
                        }
                    }
                }
            }
        }
    }
    workingImage.Release();

    // Only accept the triangulated point if the rays match up closely enough
    if (rays[0].origin != Vector3.zero && rays[1].origin != Vector3.zero) {
        Vector3 point1 = RayRayIntersection(rays[0], rays[1]);
        Vector3 point2 = RayRayIntersection(rays[1], rays[0]);
        if (Vector3.Distance(point1, point2) < 0.005f) {
            return (point1 + point2) * 0.5f; // midpoint of the two closest points
        }
    }
    return Vector3.zero;
}
// Per-frame debug visualization and preview-image routing for each connected
// calibration device: draws dot/triangulation lines, then builds a preview
// image (undistorted or background-subtracted+masked) and pushes it to the
// device's screen depending on the active optimizer.
protected void Update() {
    HyperMegaStuff.HyperMegaLines drawer = HyperMegaStuff.HyperMegaLines.drawer;
    for (int j = 0; j < calibrationDevices.Length; j++) {
        if (!calibrationDevices[j].isConnected) { continue; }

        // Refresh the real dot positions and draw a line from each real dot
        // to its triangulated counterpart (when one exists).
        for (int i = 0; i < _realDots.Count; i++) {
            drawer.color = Color.red;
            _realDots[i] = calibrationDotsParent.GetChild(i).position;
            //drawer.DrawSphere(_realDots[i], 0.01f);
            if (calibrationDevices[j].triangulatedDots[i] != Vector3.zero) {
                drawer.color = Color.green;
                //drawer.DrawSphere(calibrationDevices[j].triangulatedDots[i], 0.01f);
                drawer.color = Color.white;
                drawer.DrawLine(_realDots[i], calibrationDevices[j].triangulatedDots[i]);
            }
        }

        // Scratch Mats sized off the left image; released at the bottom of
        // the loop. NOTE(review): reallocated every frame for every device —
        // could be cached, but kept as-is here.
        Mat workingImage = new Mat(calibrationDevices[j].webcam.leftImage.Height,
                                   calibrationDevices[j].webcam.leftImage.Width,
                                   calibrationDevices[j].webcam.leftImage.Type(), 0);
        Mat workingImage2 = new Mat(calibrationDevices[j].webcam.leftImage.Height,
                                    calibrationDevices[j].webcam.leftImage.Width,
                                    calibrationDevices[j].webcam.leftImage.Type(), 0);
        for (int i = 0; i < 2; i++) { // i == 0: left image, i == 1: right image
            workingImage2.SetTo(0); // clear the output so stale pixels never leak through
            Mat curMat = i == 0 ? calibrationDevices[j].webcam.leftImage
                                : calibrationDevices[j].webcam.rightImage;

            // Undistort the image if the calibrations are available!
            if (calibrationDevices[j].undistortMapsInitialized &&
                calibrationDevices[j].undistortImage) {
                // Maps are stored interleaved: [i*2] and [i*2+1] are the
                // x/y remap tables for camera i.
                Cv2.Remap(curMat, workingImage2,
                          calibrationDevices[j].undistortMaps[(i * 2)],
                          calibrationDevices[j].undistortMaps[(i * 2) + 1]);
            } else if (calibrationDevices[j].subtractionImage[i] != null) {
                // Subtract the background from the curMat
                Cv2.Subtract(curMat, calibrationDevices[j].subtractionImage[i], workingImage);
                // Then copy through the binary mask so only screen pixels remain.
                workingImage.CopyTo(workingImage2, calibrationDevices[j].maskImage[i]);
            }

            // Route the preview to the correct half of the display.
            // optimizerIndex pairs (left cam, device 0) and (right cam, device 1)
            // to optimizer 0; the other two combinations go to optimizer 1.
            int optimizerIndex = ((i == 0 && j == 0) || (i == 1 && j == 1)) ? 0 : 1;
            bool isBottom = j == 1;            // device 1 is the bottom unit — TODO confirm
            bool isLeft = optimizerIndex == 0;
            // Only update the on-screen preview when no solver is running for
            // this optimizer, or the running solver targets this same view.
            if (optimizers[optimizerIndex].solver == null ||
                (optimizers[optimizerIndex].solver.isBottomRigel == isBottom &&
                 optimizers[optimizerIndex].solver.isLeft == isLeft)) {
                calibrationDevices[j].webcam.updateScreen(workingImage2, isLeft);
            }
        }
        workingImage.Release();
        workingImage2.Release();
        calculateImageMetrics(j);
    }
}