/// <summary>
/// Debug visualization: draws a circle on <c>cached_initMat</c> at every screen
/// point currently reported by the AR controller.
/// </summary>
/// <param name="ARC">Controller supplying the current set of screen points.</param>
void DrawScreenPoints(ThreeStage_AR_Controller ARC)
{
    Point[] c2_scrpoints = ARC.GetScreenpoints();

    // Iterate over the actual array length instead of the previous hard-coded 7,
    // so a shorter (or longer) point array cannot cause an IndexOutOfRangeException
    // (or silently skip points).
    for (int i = 0; i < c2_scrpoints.Length; i++)
    {
        Imgproc.circle(cached_initMat, c2_scrpoints[i], 10, new Scalar(255, 255, 0));
    }
}
/// <summary>
/// Per-frame AR camera callback. Pulls the latest CPU image, and on a touch
/// either (stage 1) detects ArUco markers and caches world/screen points, or
/// (stage 2) rectifies the captured image faces. When all source points are
/// set (<c>spa_full</c>), performs stage-3 real-time homography warping every
/// frame. Finally keeps the RawImage texture/orientation in sync.
/// </summary>
/// <param name="eventArgs">Frame event payload (unused directly; the image is
/// fetched from <c>cameraManager</c>).</param>
void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
{
    // Camera data extraction.
    XRCameraImage image;
    if (!cameraManager.TryGetLatestImage(out image))
    {
        Debug.Log("Uh Oh");
        return;
    }

    Vector2 img_dim = image.dimensions;
    // Plane 0 is used as the greyscale channel; its buffer is owned by `image`
    // and is only valid until image.Dispose() — see the fix below.
    XRCameraImagePlane greyscale = image.GetPlane(0);

    // Instantiates new m_Texture if necessary (first frame or resolution change).
    if (m_Texture == null || m_Texture.width != image.width)
    {
        var format = TextureFormat.RGBA32;
        m_Texture = new Texture2D(image.width, image.height, format, false);
    }

    ThreeStage_AR_Controller ARC = m_ARSessionManager.GetComponent<ThreeStage_AR_Controller>();

    // Process the image here:
    unsafe
    {
        IntPtr greyPtr = (IntPtr)greyscale.data.GetUnsafePtr();

        // TOUCH: Detect corners and set as source points.
        if (Input.touchCount > 0)
        {
            Touch touch = Input.GetTouch(0);
            if (touch.phase == TouchPhase.Began)
            {
                // Cache original image.
                Utils.copyToMat(greyPtr, cached_initMat);

                if (!spa_full) // Stage 1: Finding World Markers
                {
                    m_ImageInfo.text = string.Format(
                        "Number of markers detected: {0} \n world_nulls {1}",
                        count_src_nulls(), ARC.count_world_nulls());
                    ArucoDetection();
                    ARC.SetWorldPoints();
                    ARC.SetScreenPoints();
                    DrawScreenPoints(ARC);
                }
                else // Stage 2: Rectification of Captured Image Faces
                {
                    m_ImageInfo.text = string.Format("world_nulls: {0}", ARC.count_world_nulls());
                    ARC.SetScreenPoints();
                    DrawScreenPoints(ARC);
                    proj_point_array = ARC.GetScreenpoints();
                    GetFaces(ref proj_point_array);
                    ShowFaces(img_dim);
                }

                Core.flip(cached_initMat, outMat, 0);
            }
        }

        // Displays OpenCV Mat as a Texture.
        Utils.matToTexture2D(outMat, m_Texture, false, 0);
    }

    // BUG FIX: Dispose() previously ran BEFORE the unsafe block above, releasing
    // the native buffer that greyPtr still pointed into (use-after-dispose).
    // The image must stay alive for as long as greyscale.data is read.
    image.Dispose();

    if (spa_full) // Stage 3: Real-time warping
    {
        ARC.SetScreenPoints();
        proj_point_array = ARC.GetScreenpoints();
        for (int i = 0; i < 3; i++)
        {
            m_ImageInfo.text = string.Format("Stage 3: {0}", i);
            HomographyTransform(i);
        }
        CombineWarped();
        Utils.matToTexture2D(outMat, m_Texture, false, 0);
    }

    // Sets orientation of screen if necessary.
    // TODO: Debug why this doesn't initiate with ConfigRawimage(). The null
    // isn't triggering here (NOTE(review): if m_CachedOrientation is a
    // non-nullable ScreenOrientation, `== null` can never be true — consider
    // declaring it as ScreenOrientation?). Print cached orientation.
    if (m_CachedOrientation == null || m_CachedOrientation != Screen.orientation)
    {
        m_CachedOrientation = Screen.orientation;
        ConfigureRawImageInSpace(img_dim);
    }

    m_RawImage.texture = (Texture)m_Texture;
    // m_ImageInfo.text = string.Format("Number of Blobs: {0}", ids.size());
}
/// <summary>
/// Per-frame AR camera callback (revised variant: stage selection is driven by
/// touch position — left half of the screen runs stage 1 marker detection,
/// right half runs stage 2 face rectification — and stage 3 warps using the
/// closest cached capture). NOTE(review): this has the same signature as the
/// earlier OnCameraFrameReceived in this file; only one can exist per class —
/// remove the obsolete copy.
/// </summary>
/// <param name="eventArgs">Frame event payload (unused directly; the image is
/// fetched from <c>cameraManager</c>).</param>
void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
{
    // Camera data extraction.
    XRCameraImage image;
    if (!cameraManager.TryGetLatestImage(out image))
    {
        Debug.Log("Uh Oh");
        return;
    }

    Vector2 img_dim = image.dimensions;
    // Plane 0 is used as the greyscale channel; its buffer is owned by `image`
    // and is only valid until image.Dispose() — see the fix below.
    XRCameraImagePlane greyscale = image.GetPlane(0);

    // Instantiates new m_Texture if necessary (first frame or resolution change).
    if (m_Texture == null || m_Texture.width != image.width)
    {
        var format = TextureFormat.RGBA32;
        m_Texture = new Texture2D(image.width, image.height, format, false);
    }

    ThreeStage_AR_Controller ARC = m_ARSessionManager.GetComponent<ThreeStage_AR_Controller>();

    // Process the image here:
    unsafe
    {
        IntPtr greyPtr = (IntPtr)greyscale.data.GetUnsafePtr();

        // TOUCH: Detect corners and set as source points.
        if (Input.touchCount > 0)
        {
            Touch touch = Input.GetTouch(0);
            if (touch.phase == TouchPhase.Began)
            {
                // Cache original image.
                Utils.copyToMat(greyPtr, cached_initMat);

                // Left half of the screen -> Stage 1; right half -> Stage 2.
                if (touch.position.x < image.width / 2) // Stage 1: Finding World Markers
                {
                    // Detect the markers (in c1 space).
                    ArucoDetection();
                    // Raycast and get World points.
                    ARC.SetWorldPoints();
                    // (For Testing) Extract c2 points and draw onto output.
                    ARC.SetScreenPoints();
                    DrawScreenPoints(ARC);
                }
                else // Stage 2: Rectification of Captured Image Faces
                {
                    // Extract c2 points and draw onto output.
                    ARC.SetScreenPoints();
                    DrawScreenPoints(ARC);
                    // Caching the c2 world position.
                    ARC.CacheCamPoints();
                    // Getting dest points.
                    proj_point_array = ARC.GetScreenpoints();
                    // Rectify faces and display them.
                    GetFaces(ref proj_point_array);
                    ShowFaces(img_dim);
                }

                Core.flip(cached_initMat, outMat, 0);
            }
        }

        // Displays OpenCV Mat as a Texture.
        Utils.matToTexture2D(outMat, m_Texture, false, 0);
    }

    // BUG FIX: Dispose() previously ran BEFORE the unsafe block above, releasing
    // the native buffer that greyPtr still pointed into (use-after-dispose).
    // The image must stay alive for as long as greyscale.data is read.
    image.Dispose();

    if (spa_full) // Stage 3: Real-time warping
    {
        // Get c2 screenpoints.
        ARC.SetScreenPoints();
        proj_point_array = ARC.GetScreenpoints();

        // Get the closest camera position.
        int closest_capture = ARC.GetClosestIndex();

        // Warp rectified closest-capture Mats for each face, dependent on the
        // current position.
        for (int i = 0; i < 3; i++)
        {
            m_ImageInfo.text = String.Format("Stage 3: {0}", i);
            HomographyTransform(i, closest_capture);
        }
        m_ImageInfo.text = String.Format("closest_capture : {0}", closest_capture);

        // Combine the warped images into one image and display it.
        CombineWarped();
        Utils.matToTexture2D(outMat, m_Texture, false, 0);
    }

    // Sets orientation of screen if necessary.
    // TODO: Debug why this doesn't initiate with ConfigRawimage(). The null
    // isn't triggering here (NOTE(review): if m_CachedOrientation is a
    // non-nullable ScreenOrientation, `== null` can never be true — consider
    // declaring it as ScreenOrientation?). Print cached orientation.
    if (m_CachedOrientation == null || m_CachedOrientation != Screen.orientation)
    {
        m_CachedOrientation = Screen.orientation;
        ConfigureRawImageInSpace(img_dim);
    }

    m_RawImage.texture = (Texture)m_Texture;
    // m_ImageInfo.text = string.Format("Number of Blobs: {0}", ids.size());
}