void OnEnable()
{
    // Set target scale: the physical board has one more square than inner
    // corners along each axis.
    patternSize = new Size((int)pattern.x, (int)pattern.y);
    chessboard.transform.localScale = new Vector3(
        patternScale * (patternSize.Width + 1),
        patternScale * (patternSize.Height + 1),
        1.0f);

    // Construct world corner points, centered on the board origin.
    Vector2 offset = new Vector2(
        patternSize.Width / 2.0f * patternScale,
        patternSize.Height / 2.0f * patternScale);
    cvWorldCorners = new Matrix<double>(patternSize.Height * patternSize.Width, 1, 3);
    for (int iy = 0; iy < patternSize.Height; iy++)
    {
        for (int ix = 0; ix < patternSize.Width; ix++)
        {
            cvWorldCorners.Data[iy * patternSize.Width + ix, 0] = ix * patternScale - offset.x;
            cvWorldCorners.Data[iy * patternSize.Width + ix, 1] = iy * patternScale - offset.y;
            cvWorldCorners.Data[iy * patternSize.Width + ix, 2] = 0;
        }
    }

    webcamTexture = TrackedCameraScript.GetViveCameraTexture();
    if (webcamTexture == null)
    {
        return;
    }

    // Query the tracked camera's intrinsics from OpenVR.
    HmdVector2_t focalLength = new HmdVector2_t();
    HmdVector2_t opticCenter = new HmdVector2_t();
    OpenVR.TrackedCamera.GetCameraIntrinsics(0, EVRTrackedCameraFrameType.Undistorted,
        ref focalLength, ref opticCenter);

    // Initialize intrinsic parameters (standard 3x3 pinhole camera matrix).
    cvIntrinsicParams = new Matrix<double>(3, 3, 1);
    cvIntrinsicParams[0, 0] = focalLength.v0;
    cvIntrinsicParams[0, 1] = 0;
    cvIntrinsicParams[0, 2] = opticCenter.v0;
    cvIntrinsicParams[1, 0] = 0;
    cvIntrinsicParams[1, 1] = focalLength.v1;
    cvIntrinsicParams[1, 2] = opticCenter.v1;
    cvIntrinsicParams[2, 0] = 0;
    cvIntrinsicParams[2, 1] = 0;
    cvIntrinsicParams[2, 2] = 1;

    // The Undistorted frame type already has lens distortion removed, so all
    // four distortion coefficients stay zero.
    cvDistortionParams = new Matrix<double>(4, 1, 1);
    cvDistortionParams[0, 0] = 0.0;
    cvDistortionParams[1, 0] = 0.0;
    cvDistortionParams[2, 0] = 0.0;
    cvDistortionParams[3, 0] = 0.0;
}
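// The DetectCheckerboard() helper is called from Update() below but its body
// is not part of this listing. A minimal sketch, assuming the usual Emgu CV
// corner-detection pipeline; the detection flags and the sub-pixel search
// window are illustrative choices, not values from the original.
private bool DetectCheckerboard(Mat gray, Mat display)
{
    // Search the grayscale frame for the grid of inner corners.
    bool found = CvInvoke.FindChessboardCorners(gray, patternSize, cvImageCorners,
        CalibCbType.AdaptiveThresh | CalibCbType.NormalizeImage);
    if (found)
    {
        // Refine corner locations to sub-pixel accuracy for a stable pose.
        CvInvoke.CornerSubPix(gray, cvImageCorners, new Size(11, 11), new Size(-1, -1),
            new MCvTermCriteria(30, 0.01));
        // Overlay the detected grid on the color frame for visual feedback.
        CvInvoke.DrawChessboardCorners(display, patternSize, cvImageCorners, found);
    }
    return found;
}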
void Update()
{
    webcamTexture = TrackedCameraScript.GetViveCameraTexture();
    if (webcamTexture == null)
    {
        return;
    }

    // GetPixels32 allocates and returns a fresh Color32[] each call, so
    // pre-allocating the array beforehand is unnecessary.
    data = webcamTexture.GetPixels32();
    if (bytes == null || bytes.Length != data.Length * 3)
    {
        bytes = new byte[data.Length * 3];
    }
    if (grayBytes == null || grayBytes.Length != data.Length * 1)
    {
        grayBytes = new byte[data.Length * 1];
    }

    // OPENCV PROCESSING: pin the managed arrays so Mats can wrap their
    // memory without copying.
    GCHandle handle = GCHandle.Alloc(data, GCHandleType.Pinned);
    GCHandle resultHandle = GCHandle.Alloc(bytes, GCHandleType.Pinned);
    GCHandle grayHandle = GCHandle.Alloc(grayBytes, GCHandleType.Pinned);
    Mat currentWebcamMat = new Mat(new Size(webcamTexture.width, webcamTexture.height),
        DepthType.Cv8U, 4, handle.AddrOfPinnedObject(), webcamTexture.width * 4);
    Mat resultMat = new Mat(webcamTexture.height, webcamTexture.width,
        DepthType.Cv8U, 3, resultHandle.AddrOfPinnedObject(), webcamTexture.width * 3);
    Mat grayMat = new Mat(webcamTexture.height, webcamTexture.width,
        DepthType.Cv8U, 1, grayHandle.AddrOfPinnedObject(), webcamTexture.width * 1);

    // Color32 is RGBA in memory; treating it as BGRA and dropping the fourth
    // channel leaves RGB byte order, which matches the RGB24 display texture.
    // The grayscale conversion must then use Bgr2Gray, since resultMat has
    // only 3 channels (Bgra2Gray on a 3-channel Mat throws).
    CvInvoke.CvtColor(currentWebcamMat, resultMat, ColorConversion.Bgra2Bgr);
    CvInvoke.CvtColor(resultMat, grayMat, ColorConversion.Bgr2Gray);

    cvImageCorners = new Matrix<float>(patternSize.Width * patternSize.Height, 1, 2);
    bool detected = DetectCheckerboard(grayMat, resultMat);
    if (detected)
    {
        detectionFinished = true;
        SetCameraTransformFromChessboard();
    }

    if (flip != FlipType.None)
    {
        CvInvoke.Flip(resultMat, resultMat, flip);
    }

    // Unpin only after the last Mat operation; the Mats wrap the pinned
    // memory directly, so freeing the handles before the Flip above would
    // let the GC move the arrays out from under them.
    handle.Free();
    resultHandle.Free();
    grayHandle.Free();

    if (displayTexture == null || displayTexture.width != webcamTexture.width
        || displayTexture.height != webcamTexture.height)
    {
        displayTexture = new Texture2D(webcamTexture.width, webcamTexture.height,
            TextureFormat.RGB24, false);
    }
    displayTexture.LoadRawTextureData(bytes);
    displayTexture.Apply();
    TrackedCameraScript.Instance.screenRenderer.material.mainTexture = displayTexture;
}
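// SetCameraTransformFromChessboard() is also not shown in this listing. A
// minimal sketch, assuming it recovers the board pose with SolvePnP from the
// correspondences built above; the OpenCV-to-Unity handedness conversion and
// the use of the chessboard's local transform (i.e. the board parented under
// the camera rig) are assumptions to verify in-scene. The original, as its
// name suggests, may instead invert this pose to place the camera.
private void SetCameraTransformFromChessboard()
{
    Matrix<double> rvec = new Matrix<double>(3, 1);
    Matrix<double> tvec = new Matrix<double>(3, 1);
    // Pose of the board in the camera frame from the 3D-2D correspondences.
    CvInvoke.SolvePnP(cvWorldCorners, cvImageCorners,
        cvIntrinsicParams, cvDistortionParams, rvec, tvec);

    // Expand the Rodrigues rotation vector into a 3x3 rotation matrix.
    Matrix<double> rot = new Matrix<double>(3, 3);
    CvInvoke.Rodrigues(rvec, rot);

    // OpenCV cameras look down +z with +y down; Unity is left-handed with
    // +y up, so y components are negated (conjugation by diag(1, -1, 1)).
    Vector3 position = new Vector3((float)tvec[0, 0], -(float)tvec[1, 0], (float)tvec[2, 0]);
    Vector3 forward = new Vector3((float)rot[0, 2], -(float)rot[1, 2], (float)rot[2, 2]);
    Vector3 up = new Vector3(-(float)rot[0, 1], (float)rot[1, 1], -(float)rot[2, 1]);
    chessboard.transform.localPosition = position;
    chessboard.transform.localRotation = Quaternion.LookRotation(forward, up);
}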
private void Awake()
{
    // Expose a singleton reference for other scripts.
    Instance = this;
}
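// For reference, the declarations these methods assume, reconstructed from
// the usage above. The class name, the serialized defaults, and the Texture2D
// return type of TrackedCameraScript.GetViveCameraTexture() are assumptions;
// types follow Emgu CV 3.x and the SteamVR (Valve.VR) C# bindings.
using System.Drawing;                    // Size
using System.Runtime.InteropServices;    // GCHandle
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using UnityEngine;
using Valve.VR;

public class ChessboardCalibration : MonoBehaviour
{
    public static ChessboardCalibration Instance;

    public GameObject chessboard;                // virtual board overlay
    public Vector2 pattern = new Vector2(7, 5);  // inner corners per row/column
    public float patternScale = 0.03f;           // square size in meters
    public FlipType flip = FlipType.None;        // optional display flip

    private Size patternSize;
    private Texture2D webcamTexture;             // Vive tracked-camera frame
    private Color32[] data;                      // RGBA32 pixels from the texture
    private byte[] bytes;                        // 3-channel color buffer
    private byte[] grayBytes;                    // 1-channel grayscale buffer
    private Texture2D displayTexture;            // RGB24 texture shown on screen
    private Matrix<double> cvWorldCorners;       // board corners in meters
    private Matrix<float> cvImageCorners;        // detected corners in pixels
    private Matrix<double> cvIntrinsicParams;    // 3x3 camera matrix
    private Matrix<double> cvDistortionParams;   // zeros: frame is undistorted
    private bool detectionFinished;
}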