// Computes the pose the camera must assume to appear directly in front of
// the given marker: the marker's transform matrix is inverted to obtain the
// marker-to-camera transform, whose rotation/translation define the target.
private Pose GetTargetPose(MarkerBehaviour marker)
{
    // Invert the marker's matrix so it expresses the camera relative to the marker.
    Matrix4x4 markerToCamera = marker.GetMatrix().inverse;

    Pose targetPose = new Pose
    {
        rotation = ARucoUnityHelper.GetQuaternion(markerToCamera),
        // Offset from the marker's world position by the inverted matrix's translation.
        position = marker.transform.position + ARucoUnityHelper.GetPosition(markerToCamera)
    };

    return targetPose;
}
// Per-frame camera texture callback for the calibration pipeline.
// Parses the user-entered square size, converts the camera frame to an
// OpenCV Mat (plus a grayscale copy), services any pending reset / capture /
// calibrate requests, and writes the frame back into the output texture.
// Returns false (frame skipped) when the square-size field is not a valid float.
private bool OnProcessTexture(WebCamTexture input, ref Texture2D output,
    ARucoUnityHelper.TextureConversionParams textureParameters)
{
    textureParameters.FlipHorizontally = false;

    // TryParse already assigns squareSizeMeters on success, so no second
    // Parse call is needed (the original re-parsed the same string redundantly).
    if (!float.TryParse(patternSizeString.value, out squareSizeMeters))
    {
        return false;
    }

    mat = ARucoUnityHelper.TextureToMat(input, textureParameters);

    imageWidth = mat.Width;
    imageHeight = mat.Height;

    // Grayscale copy is what the chessboard/corner detection operates on.
    Cv2.CvtColor(mat, grayMat, ColorConversionCodes.BGR2GRAY);

    // Service one-shot flags set from the UI thread, clearing each after use.
    if (reset)
    {
        ResetCalibrationImmediate();
        reset = false;
    }

    if (captureFrame)
    {
        RegisterCurrentCalib();
        captureFrame = false;
    }

    if (calibrate)
    {
        StartCalibrateAsync();
        calibrate = false;
    }

    output = ARucoUnityHelper.MatToTexture(mat, output);

    // Release the Mat's native memory; a new one is created next frame.
    mat.Release();
    return true;
}
// Updates the tracked marker pose from the "transform matrix" produced by OpenCV.
// An optional extra rotation can be supplied so the whole world matrix is rotated
// accordingly — useful for AR Foundation, where the camera view is in landscape
// mode and must be rotated to match.
private void UpdateMarkerPose(Matrix4x4 transformMatrix, Nullable<Vector3> additionalRotation = null)
{
    // Conversion matrices from OpenCV's coordinate space into Unity's:
    // flip Y, flip Z, then rotate -90 degrees about X.
    Matrix4x4 flipY = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    Matrix4x4 flipZ = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    Matrix4x4 rotX = Matrix4x4.TRS(Vector3.zero, Quaternion.Euler(-90, 0, 0), new Vector3(1, 1, 1));

    currentTransformationMatrix = (flipY * transformMatrix * flipZ) * rotX;

    // Apply the caller-supplied rotation (identity when none was given).
    Vector3 extraEuler = additionalRotation.GetValueOrDefault(Vector3.zero);
    Matrix4x4 extraRotation = Matrix4x4.Rotate(Quaternion.Euler(extraEuler));
    currentTransformationMatrix = extraRotation * currentTransformationMatrix;

    // Decompose the final matrix into the marker pose's position, rotation and scale.
    currentMarkerPose.position = ARucoUnityHelper.GetPosition(currentTransformationMatrix);
    currentMarkerPose.rotation = ARucoUnityHelper.GetQuaternion(currentTransformationMatrix);
    currentMarkerPose.scale = ARucoUnityHelper.GetScale(currentTransformationMatrix);
}
// Webcam frame callback for the marker detector. Converts the frame into a
// buffer Mat, periodically hands a copy of it to the detection thread, and
// renders either the detector's annotated image or the raw camera image into
// the output texture. Always returns true (frame consumed).
private bool ProcessTexture(WebCamTexture input, ref Texture2D output,
    ARucoUnityHelper.TextureConversionParams textureParameters)
{
    imgBuffer = ARucoUnityHelper.TextureToMat(input, textureParameters);

    timeCount += Time.deltaTime;

    // Hand the current frame to the detector thread, but only when it is idle
    // and enough time has passed since the previous hand-off.
    bool detectorIdle = threadCounter == 0;
    if (detectorIdle && timeCount >= markerDetectorPauseTime)
    {
        imgBuffer.CopyTo(img);
        Interlocked.Increment(ref threadCounter);
        timeCount = 0;
    }

    updateThread = true;

    if (!outputImage)
    {
        // No detector result pending — show the live camera image.
        output = ARucoUnityHelper.MatToTexture(imgBuffer, output);
    }
    else
    {
        // Show the detector's frame, optionally with marker outlines drawn on it.
        if (drawMarkerOutlines)
        {
            CvAruco.DrawDetectedMarkers(img, corners, ids);
        }

        output = ARucoUnityHelper.MatToTexture(img, output);
        outputImage = false;
    }

    // Free the buffer's native memory; it is recreated on the next frame.
    imgBuffer.Release();
    return true;
}
// Distance from the camera to the marker: the magnitude of the translation
// component of the marker's transform matrix.
protected float GetMarkerDistanceFromCamera(MarkerBehaviour m)
    => ARucoUnityHelper.GetPosition(m.GetMatrix()).magnitude;
// AR Foundation camera-frame callback: copies the latest CPU camera image into
// a Texture2D, converts it into an OpenCV Mat, and periodically hands a copy to
// the marker-detection thread when the camera is sufficiently still.
// NOTE(review): unsafe — writes converted pixels straight into the texture's
// raw buffer via GetUnsafePtr.
unsafe void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
{
    // Get the latest CPU-accessible image; bail out if none is available yet.
    XRCameraImage image;
    if (!cameraManager.TryGetLatestImage(out image))
    {
        return;
    }

    timeCount += Time.deltaTime;

    // Select the format of the destination texture.
    var format = TextureFormat.RGBA32;

    // Recreate the texture only when its size no longer matches the incoming image.
    if (texture == null || texture.width != image.width || texture.height != image.height)
    {
        texture = new Texture2D(image.width, image.height, format, false);
    }

    // Mirror on the Y axis so the image matches OpenCV's row ordering.
    var conversionParams = new XRCameraImageConversionParams(image, format, CameraImageTransformation.MirrorY);

    // Convert directly into the texture's raw pixel buffer.
    var rawTextureData = texture.GetRawTextureData<byte>();
    try
    {
        image.Convert(conversionParams, new IntPtr(rawTextureData.GetUnsafePtr()), rawTextureData.Length);
    }
    finally
    {
        // The native image must always be disposed, even if Convert throws.
        image.Dispose();
    }

    // Upload the written pixel data to the GPU.
    texture.Apply();

    texParam.FlipHorizontally = false;

    // Create a Mat from the texture for OpenCV processing.
    imgBuffer = ARucoUnityHelper.TextureToMat(texture, texParam);

    // Hand a frame to the detector thread only when it is idle, the pause
    // interval has elapsed, and the camera is moving/rotating slowly enough
    // for a stable detection.
    if (threadCounter == 0 && timeCount >= markerDetectorPauseTime &&
        arCamera.velocity.magnitude <= maxPositionChangePerFrame &&
        cameraPoseTracker.rotationChange <= maxRotationChangePerFrameDegrees)
    {
        // Copy the buffer data to the img Mat read by the worker thread.
        imgBuffer.CopyTo(img);
        Interlocked.Increment(ref threadCounter);
        timeCount = 0;
    }

    updateThread = true;

    // Optionally display the OpenCV view of the frame in the UI.
    if (showOpenCvTexture)
    {
        openCvTexture.texture = ARucoUnityHelper.MatToTexture(imgBuffer, texture);
    }

    // Release the buffer Mat's native memory before the next frame arrives.
    imgBuffer.Release();
}