public override void Load(IServiceProvider provider) {
    showThreshold = false;
    var workflow = (ExpressionBuilderGraph)provider.GetService(typeof(ExpressionBuilderGraph));
    var context = (ITypeVisualizerContext)provider.GetService(typeof(ITypeVisualizerContext));
    if (context != null) {
        var workflowNode = workflow.First(node => node.Value == context.Source);
        var inputNode = workflow.Predecessors(workflowNode)
            .Select(node => node.Value as InspectBuilder)
            .FirstOrDefault();
        var workflowElement = ExpressionBuilder.GetWorkflowElement(context.Source) as DetectMarkers;
        if (inputNode != null && workflowElement != null) {
            detectMarkers = workflowElement;
            imageThreshold = new MarkerDetector();
            inputObserver = inputNode.Output.Merge().Subscribe(output => input = output as IplImage);
        }
    }

    base.Load(provider);
    StatusStripEnabled = false;

    // Right-click toggles between the marker overlay and the threshold view.
    VisualizerCanvas.Canvas.MouseClick += (sender, e) => {
        if (e.Button == MouseButtons.Right) {
            showThreshold = !showThreshold;
        }
    };
}
void Awake() {
    Instance = this;
    markerDetector = GetComponent<MarkerDetector>();
    foreach (VideoData videoData in allVideoData) {
        nameToVideoData.Add(videoData.name, videoData);
    }
}
public Pipeline() {
    // TODO: Inject index
    CameraReader = new CameraReader(2);
    MarkerDetector = new MarkerDetector();
    QRCodeFinder = new QRCodeFinder();
    QRCodeReader = new QRCodeReader();
    CardManager = new CardManager();
    AugmentedDrawer = new AugmentedDrawer();
}
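The TODO above flags the hard-coded camera index. A minimal sketch of the injected variant, reusing only the stage types already shown, could be:

// Hypothetical constructor overload resolving the TODO: the caller supplies
// the camera index instead of it being hard-coded to 2.
public Pipeline(int cameraIndex) {
    CameraReader = new CameraReader(cameraIndex);
    MarkerDetector = new MarkerDetector();
    QRCodeFinder = new QRCodeFinder();
    QRCodeReader = new QRCodeReader();
    CardManager = new CardManager();
    AugmentedDrawer = new AugmentedDrawer();
}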
public override void Unload() {
    if (inputObserver != null) {
        inputObserver.Dispose();
        imageThreshold.Dispose();
        imageThreshold = null;
        inputObserver = null;
    }

    base.Unload();
}
public override IDisposable Load() {
    detector = new MarkerDetector();
    if (!string.IsNullOrEmpty(CameraParameters)) {
        parameters = new CameraParameters();
        parameters.ReadFromXmlFile(CameraParameters);
        cameraMatrix = new CvMat(3, 3, CvMatDepth.CV_32F, 1);
        distortion = new CvMat(1, 4, CvMatDepth.CV_32F, 1);
        parameters.CopyParameters(cameraMatrix.DangerousGetHandle(), distortion.DangerousGetHandle());
    }

    return base.Load();
}
private void CameraLoop() {
    CancellationToken cancellationToken = cancellationTokenSource.Token;
    var parameters = new CameraParameters();
    Size size;
    var cameraMatrix = new Mat(3, 3, Depth.F32, 1);
    var distortion = new Mat(1, 4, Depth.F32, 1);
    parameters.CopyParameters(cameraMatrix, distortion, out size);

    using (var detector = new MarkerDetector()) {
        detector.ThresholdMethod = ThresholdMethod.AdaptiveThreshold;
        detector.Param1 = 7.0;
        detector.Param2 = 7.0;
        detector.MinSize = 0.04f;
        detector.MaxSize = 0.5f;
        detector.CornerRefinement = CornerRefinementMethod.Lines;
        var markerSize = 10;

        using (var capture = Capture.CreateCameraCapture(0)) {
            while (!cancellationToken.IsCancellationRequested) {
                IplImage image = capture.QueryFrame();
                var detectedMarkers = detector.Detect(image, cameraMatrix, distortion, markerSize);
                foreach (var marker in detectedMarkers) {
                    if (marker.Id == _MarkerIndex) {
                        // Remap the tracked marker's position and area to arm offsets.
                        double sideOffset = Map(marker.Center.X, 46, 590, -8, 8);
                        double heightOffset = Map(marker.Center.Y, 46, 435, 6, -6);
                        double height = Map(marker.Area, 6280, 10150, 18.5, 14);
                        armDistance += heightOffset > 1.0 ? 0.4 : (heightOffset < -1.0 ? -0.4 : 0);
                        armAngle += sideOffset > 1.0 ? 0.7 : (sideOffset < -1.0 ? -0.7 : 0);
                        //armDistance += heightOffset;
                        _Arm?.MoveToRelativeAsync(armAngle, armDistance, armHeight);
                        marker.Draw(image, Scalar.Rgb(1, 0, 0));
                    } else {
                        marker.Draw(image, Scalar.Rgb(0, 1, 0));
                    }
                }

                WindwoDisplay(image);
            }
        }
    }
}
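Map is called above but not defined in the snippet; from its call sites it behaves like a linear range remap. A minimal sketch under that assumption:

// Hypothetical implementation of the Map helper: linearly remaps value from
// [inMin, inMax] to [outMin, outMax]. The call sites above also rely on
// inverted output ranges (e.g. 6 to -6), which this form handles naturally.
private static double Map(double value, double inMin, double inMax, double outMin, double outMax) {
    return outMin + (value - inMin) * (outMax - outMin) / (inMax - inMin);
}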
/// <summary>
/// Sets up the detector and marker indicator to find this marker.
/// </summary>
public void Start() {
    this.lamb = new GameObject("lamb").transform;
    this.markerDetector = MarkerDetector.Instance;
    this.MetaScale = DefaultMetaScale;
    this.MetaDepth = DefaultMetaDepth;
    if (this.markerDetector == null) {
        throw new InvalidOperationException("The marker detector cannot be loaded. Is the Meta connected?");
    }

    // Hide the marker indicator.
    MarkerTargetIndicator indicator = this.markerDetector.GetComponent<MarkerTargetIndicator>();
    indicator.enabled = false;
}
public override IObservable<MarkerFrame> Process(IObservable<IplImage> source) {
    return Observable.Defer(() => {
        Size size;
        CameraParameters parameters = null;
        var cameraMatrix = new Mat(3, 3, Depth.F32, 1);
        var distortion = new Mat(1, 4, Depth.F32, 1);
        var detector = new MarkerDetector();

        var parametersFileName = CameraParameters;
        if (!string.IsNullOrEmpty(parametersFileName)) {
            if (!File.Exists(parametersFileName)) {
                throw new InvalidOperationException("Failed to open the camera parameters at the specified path.");
            }

            parameters = new CameraParameters();
            parameters.ReadFromXmlFile(parametersFileName);
            parameters.CopyParameters(cameraMatrix, distortion, out size);
        }

        return source.Select(input => {
            detector.ThresholdMethod = ThresholdMethod;
            detector.Param1 = Param1;
            detector.Param2 = Param2;
            detector.MinSize = MinSize;
            detector.MaxSize = MaxSize;
            detector.CornerRefinement = CornerRefinement;
            var detectedMarkers = detector.Detect(input, cameraMatrix, distortion, MarkerSize);
            return new MarkerFrame(parameters, detectedMarkers);
        });
    });
}
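Outside of Bonsai, which normally builds and subscribes the workflow itself, a minimal sketch of driving this operator with plain Rx might look like the following; `images` stands in for any existing `IObservable<IplImage>` source, and `DetectMarkers` is the node type named in the first snippet:

// Hypothetical direct usage; in Bonsai the runtime performs the subscription.
// "images" is assumed to be an already-constructed IObservable<IplImage>.
var detectMarkers = new DetectMarkers { MarkerSize = 10 };
using (var subscription = detectMarkers.Process(images)
    .Subscribe(frame => Console.WriteLine(frame))) {
    Console.ReadLine(); // keep the subscription alive while frames arrive
}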
public Bitmap Step() {
    if (original == null) {
        return null;
    }

    sw.Restart();
    var markers = MarkerDetector.FindMarkers(original);
    var qrPositions = QRCodeFinder.FindQRCodes(markers);
    var qrCodes = QRCodeReader.ReadQRCodes(original, qrPositions);
    var cards = CardManager.AddOrMoveCards(qrCodes);
    var processed = AugmentedDrawer.DrawCardInfos(original, cards);
    sw.Stop();

    if (sw.ElapsedMilliseconds > 0) {
        RunsPerSecond = 1000 / sw.ElapsedMilliseconds;
    }

    return processed;
}
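None of the snippets show how `original` is populated. A host loop around Step might look like the sketch below, where SetFrame and NextCameraFrame are placeholders for the class's real frame-ingestion path (possibly the CameraReader wired up in the constructor above):

// Hypothetical host loop; SetFrame and NextCameraFrame are placeholders for
// APIs these snippets do not show.
var pipeline = new Pipeline();
while (!Console.KeyAvailable) {
    pipeline.SetFrame(NextCameraFrame());
    using (Bitmap processed = pipeline.Step()) {
        if (processed != null) {
            Console.WriteLine(pipeline.RunsPerSecond + " runs/s");
        }
    }
}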
/// <summary>
/// Init this instance.
/// </summary>
private IEnumerator init() {
    if (webCamTexture != null) {
        webCamTexture.Stop();
        initDone = false;
        rgbaMat.Dispose();
    }

    // Check how many and which cameras are available on the device.
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices[cameraIndex].isFrontFacing == isFrontFacing) {
            Debug.Log(cameraIndex + " name " + WebCamTexture.devices[cameraIndex].name + " isFrontFacing " + WebCamTexture.devices[cameraIndex].isFrontFacing);
            webCamDevice = WebCamTexture.devices[cameraIndex];
            webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            break;
        }
    }

    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices[0];
        webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
    }

    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Start the camera.
    webCamTexture.Play();

    while (true) {
        // On iOS, webCamTexture.width and webCamTexture.height report 16 until
        // webCamTexture.didUpdateThisFrame is true, so wait for a real frame first.
        // (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
        if (webCamTexture.didUpdateThisFrame) {
#endif
            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing);

            colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            gameObject.transform.localEulerAngles = new Vector3(0, 0, 0);
            // gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis(webCamTexture.videoRotationAngle, Vector3.back);
            gameObject.transform.localScale = new Vector3(webCamTexture.width, webCamTexture.height, 1);
            // bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            // float scaleX = 1;
            // float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
            // gameObject.transform.localScale = new Vector3(scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

            gameObject.GetComponent<Renderer>().material.mainTexture = texture;
            Camera.main.orthographicSize = webCamTexture.height / 2;

            // Set the camera parameters: focal length approximated by the larger
            // image dimension, principal point at the image center.
            int max_d = Mathf.Max(rgbaMat.rows(), rgbaMat.cols());
            camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, max_d);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, rgbaMat.cols() / 2.0f);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, max_d);
            camMatrix.put(1, 2, rgbaMat.rows() / 2.0f);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());

            distCoeffs = new MatOfDouble(0, 0, 0, 0);
            Debug.Log("distCoeffs " + distCoeffs.dump());

            // Calibrate the camera.
            Size imageSize = new Size(rgbaMat.cols(), rgbaMat.rows());
            double apertureWidth = 0;
            double apertureHeight = 0;
            double[] fovx = new double[1];
            double[] fovy = new double[1];
            double[] focalLength = new double[1];
            Point principalPoint = new Point();
            double[] aspectratio = new double[1];
            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx[0]);
            Debug.Log("fovy " + fovy[0]);
            Debug.Log("focalLength " + focalLength[0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio[0]);

            // Adjust the Unity camera FOV.
            for (int i = 0; i < ARCamera.Length; i++) {
                ARCamera[i].fieldOfView = (float)fovy[0];
            }

            markerDetector = new MarkerDetector(camMatrix, distCoeffs);

            // Marker coordinate initial matrix.
            lookAtM = getLookAtMatrix(new Vector3(0, 0, 0), new Vector3(0, 0, 1), new Vector3(0, -1, 0));
            Debug.Log("lookAt " + lookAtM.ToString());

            // OpenGL-to-Unity coordinate system conversion matrix.
            // Per http://docs.unity3d.com/ScriptReference/Camera-worldToCameraMatrix.html, camera space
            // matches the OpenGL convention: the camera's forward is the negative Z axis. This is
            // different from Unity's convention, where forward is the positive Z axis.
            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
            Debug.Log("invertZM " + invertZM.ToString());

            initDone = true;
            break;
        } else {
            yield return 0;
        }
    }
}

// Update is called once per frame
void Update() {
    if (!initDone)
        return;

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
    if (webCamTexture.didUpdateThisFrame) {
#endif
        Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

        // Flip to the correct orientation.
        if (webCamTexture.videoVerticallyMirrored) {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip(rgbaMat, rgbaMat, -1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip(rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip(rgbaMat, rgbaMat, 1);
                }
            }
        } else {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip(rgbaMat, rgbaMat, 1);
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip(rgbaMat, rgbaMat, 0);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 0) {
                } else if (webCamTexture.videoRotationAngle == 180) {
                    Core.flip(rgbaMat, rgbaMat, -1);
                }
            }
        }

        markerDetector.processFrame(rgbaMat, 1);
        //Debug.Log("markerDetector.getTransformations().Count " + markerDetector.getTransformations().Count);

        for (int i = 0; i < ARCamera.Length; i++) {
            ARCamera[i].gameObject.SetActive(false);
        }

        int markerCount = markerDetector.getTransformations().Count;
        for (int i = 0; i < markerCount; i++) {
            if (i > ARCamera.Length - 1)
                break;

            ARCamera[i].gameObject.SetActive(true);

            // Marker-to-camera coordinate system conversion matrix.
            transformationM = markerDetector.getTransformations()[i];
            //Debug.Log("transformationM " + transformationM.ToString());

            worldToCameraM = lookAtM * transformationM * invertZM;
            //Debug.Log("worldToCameraM " + worldToCameraM.ToString());

            ARCamera[i].worldToCameraMatrix = worldToCameraM;
        }

        Utils.matToTexture2D(rgbaMat, texture, colors);
        gameObject.GetComponent<Renderer>().material.mainTexture = texture;
    }
}

void OnDisable() {
    webCamTexture.Stop();
}

/// <summary>
/// Gets the look-at matrix.
/// </summary>
/// <returns>The look-at matrix.</returns>
/// <param name="pos">Position.</param>
/// <param name="target">Target.</param>
/// <param name="up">Up.</param>
private Matrix4x4 getLookAtMatrix(Vector3 pos, Vector3 target, Vector3 up) {
    Vector3 z = Vector3.Normalize(pos - target);
    Vector3 x = Vector3.Normalize(Vector3.Cross(up, z));
    Vector3 y = Vector3.Normalize(Vector3.Cross(z, x));

    Matrix4x4 result = new Matrix4x4();
    result.SetRow(0, new Vector4(x.x, x.y, x.z, -(Vector3.Dot(pos, x))));
    result.SetRow(1, new Vector4(y.x, y.y, y.z, -(Vector3.Dot(pos, y))));
    result.SetRow(2, new Vector4(z.x, z.y, z.z, -(Vector3.Dot(pos, z))));
    result.SetRow(3, new Vector4(0, 0, 0, 1));

    return result;
}

void OnGUI() {
    float screenScale = Screen.height / 240.0f;
    Matrix4x4 scaledMatrix = Matrix4x4.Scale(new Vector3(screenScale, screenScale, screenScale));
    GUI.matrix = scaledMatrix;

    GUILayout.BeginVertical();
    if (GUILayout.Button("back")) {
        Application.LoadLevel("MarkerBasedARSample");
    }

    if (GUILayout.Button("change camera")) {
        isFrontFacing = !isFrontFacing;
        StartCoroutine(init());
    }

    GUILayout.EndVertical();
}
}
/// <summary>
/// Method called when creating a UserController.
/// </summary>
public override void Init() {
    this.markerSensor = new MarkerSensor(new MarkerLocations(Path), new Normal(this.orientationStd), new Normal(this.positionStd));
    this.markerDetector = MarkerDetector.Instance;
    this.markerTransform = new GameObject("UsedToCreateTransform").transform;
}
/// <summary>
/// Init this instance.
/// </summary>
private IEnumerator init() {
    if (webCamTexture != null) {
        webCamTexture.Stop();
        initDone = false;
        rgbaMat.Dispose();
    }

    // Check how many and which cameras are available on the device.
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices[cameraIndex].isFrontFacing == shouldUseFrontFacing) {
            Debug.Log(cameraIndex + " name " + WebCamTexture.devices[cameraIndex].name + " isFrontFacing " + WebCamTexture.devices[cameraIndex].isFrontFacing);
            webCamDevice = WebCamTexture.devices[cameraIndex];
            webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            break;
        }
    }

    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices[0];
        webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
    }

    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Start the camera.
    webCamTexture.Play();

    while (true) {
        // On iOS, webCamTexture.width and webCamTexture.height report 16 until
        // webCamTexture.didUpdateThisFrame is true, so wait for a real frame first.
        // (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
        if (webCamTexture.didUpdateThisFrame) {
#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
            while (webCamTexture.width <= 16) {
                webCamTexture.GetPixels32();
                yield return new WaitForEndOfFrame();
            }
#endif
#endif
            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing);

            colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            gameObject.GetComponent<Renderer>().material.mainTexture = texture;
            updateLayout();

            // Set the camera parameters: focal length approximated by the larger
            // image dimension, principal point at the image center.
            int max_d = Mathf.Max(rgbaMat.rows(), rgbaMat.cols());
            camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, max_d);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, rgbaMat.cols() / 2.0f);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, max_d);
            camMatrix.put(1, 2, rgbaMat.rows() / 2.0f);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());

            distCoeffs = new MatOfDouble(0, 0, 0, 0);
            Debug.Log("distCoeffs " + distCoeffs.dump());

            // Calibrate the camera.
            Size imageSize = new Size(rgbaMat.cols(), rgbaMat.rows());
            double apertureWidth = 0;
            double apertureHeight = 0;
            double[] fovx = new double[1];
            double[] fovy = new double[1];
            double[] focalLength = new double[1];
            Point principalPoint = new Point();
            double[] aspectratio = new double[1];
            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx[0]);
            Debug.Log("fovy " + fovy[0]);
            Debug.Log("focalLength " + focalLength[0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio[0]);

            // Adjust the Unity camera FOV.
            for (int i = 0; i < ARCamera.Length; i++) {
                ARCamera[i].fieldOfView = (float)fovy[0];
            }

            markerDetector = new MarkerDetector(camMatrix, distCoeffs, markerDesign);

            // Marker coordinate initial matrix.
            lookAtM = getLookAtMatrix(new Vector3(0, 0, 0), new Vector3(0, 0, 1), new Vector3(0, -1, 0));
            Debug.Log("lookAt " + lookAtM.ToString());

            // OpenGL-to-Unity coordinate system conversion matrix.
            // Per http://docs.unity3d.com/ScriptReference/Camera-worldToCameraMatrix.html, camera space
            // matches the OpenGL convention: the camera's forward is the negative Z axis. This is
            // different from Unity's convention, where forward is the positive Z axis.
            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
            Debug.Log("invertZM " + invertZM.ToString());

            screenOrientation = Screen.orientation;
            initDone = true;
            break;
        } else {
            yield return 0;
        }
    }
}
/// <summary>
/// Raises the web cam texture to mat helper initialized event.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized() {
    Debug.Log("OnWebCamTextureToMatHelperInitialized");

    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();
    texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
    ScreenQuad.GetComponent<Renderer>().material.mainTexture = texture;

    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    float width = webCamTextureMat.width();
    float height = webCamTextureMat.height();
    float imageSizeScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale >= heightScale) {
        imageSizeScale = (float)Screen.height / (float)Screen.width;
    }

    // Set the camera parameters: focal length approximated by the larger image
    // dimension, principal point at the image center.
    int max_d = (int)Mathf.Max(width, height);
    double fx = max_d;
    double fy = max_d;
    double cx = width / 2.0f;
    double cy = height / 2.0f;
    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, fx);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, cx);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, fy);
    camMatrix.put(1, 2, cy);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);
    Debug.Log("camMatrix " + camMatrix.dump());

    distCoeffs = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    // Calibrate the camera.
    Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx[0]);
    Debug.Log("fovy " + fovy[0]);
    Debug.Log("focalLength " + focalLength[0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio[0]);

    // Convert between the OpenCV and Unity FOV conventions.
    double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
    double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));
    Debug.Log("fovXScale " + fovXScale);
    Debug.Log("fovYScale " + fovYScale);

    // Resize the screen quad.
    Matrix4x4 p = ARUtils.CalculateProjectionMatrixFromCameraMatrixValues((float)fx, (float)fy, (float)cx, (float)cy, width, height, 0.3f, 1000f);
    Vector3 cameraSpacePos = UnProjectVector(p, new Vector3(1.0f, 1.0f, 1.0f));
    if (widthScale > heightScale) {
        ScreenQuad.transform.localScale = new Vector3(cameraSpacePos.x * 2f, cameraSpacePos.x * height / width * 2f, 1);
    } else {
        ScreenQuad.transform.localScale = new Vector3(cameraSpacePos.y * width / height * 2f, cameraSpacePos.y * 2f, 1);
    }

    // Create the marker detector.
    MarkerDesign[] markerDesigns = new MarkerDesign[markerSettings.Length];
    for (int i = 0; i < markerDesigns.Length; i++) {
        markerDesigns[i] = markerSettings[i].markerDesign;
    }
    markerDetector = new MarkerDetector(camMatrix, distCoeffs, markerDesigns);

    invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    Debug.Log("invertYM " + invertYM.ToString());
    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    Debug.Log("invertZM " + invertZM.ToString());

    // If the web camera is front facing, flip the Mat.
    if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing) {
        webCamTextureToMatHelper.flipHorizontal = true;
    }
}
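Written as math, the two scale factors are the ratio of the symmetric pinhole FOV to the FOV split at the (possibly off-center) principal point. With image width $w$, focal length $f_x$, and principal point $c_x$:

$$\text{fovXScale} = \frac{2\arctan\!\big(w/(2f_x)\big)}{\arctan(c_x/f_x) + \arctan\!\big((w - c_x)/f_x\big)}$$

and analogously for fovYScale. When the principal point is centered ($c_x = w/2$), the denominator equals the numerator and the scale is exactly 1, so the correction only matters for off-center calibrations.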
// Use this for initialization
void Start() {
    gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);
    Utils.texture2DToMat(imgTexture, imgMat);
    Debug.Log("imgMat dst ToString " + imgMat.ToString());

    float width = imgMat.width();
    float height = imgMat.height();
    float imageSizeScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageSizeScale = (float)Screen.height / (float)Screen.width;
    } else {
        Camera.main.orthographicSize = height / 2;
    }

    // Set the camera parameters: focal length approximated by the larger image
    // dimension, principal point at the image center.
    int max_d = (int)Mathf.Max(width, height);
    double fx = max_d;
    double fy = max_d;
    double cx = width / 2.0f;
    double cy = height / 2.0f;
    Mat camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, fx);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, cx);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, fy);
    camMatrix.put(1, 2, cy);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);
    Debug.Log("camMatrix " + camMatrix.dump());

    MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    // Calibrate the camera.
    Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx[0]);
    Debug.Log("fovy " + fovy[0]);
    Debug.Log("focalLength " + focalLength[0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio[0]);

    // Convert between the OpenCV and Unity FOV conventions.
    double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
    double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));
    Debug.Log("fovXScale " + fovXScale);
    Debug.Log("fovYScale " + fovYScale);

    // Adjust the Unity camera FOV.
    if (widthScale < heightScale) {
        ARCamera.fieldOfView = (float)(fovx[0] * fovXScale);
    } else {
        ARCamera.fieldOfView = (float)(fovy[0] * fovYScale);
    }

    MarkerDesign[] markerDesigns = new MarkerDesign[markerSettings.Length];
    for (int i = 0; i < markerDesigns.Length; i++) {
        markerDesigns[i] = markerSettings[i].markerDesign;
    }
    MarkerDetector markerDetector = new MarkerDetector(camMatrix, distCoeffs, markerDesigns);
    markerDetector.processFrame(imgMat, 1);

    foreach (MarkerSettings settings in markerSettings) {
        settings.setAllARGameObjectsDisable();
    }

    if (shouldMoveARCamera) {
        // Move the AR camera relative to the first detected marker.
        List<Marker> findMarkers = markerDetector.getFindMarkers();
        if (findMarkers.Count > 0) {
            Marker marker = findMarkers[0];
            if (markerSettings.Length > 0) {
                MarkerSettings settings = markerSettings[0];
                if (marker.id == settings.getMarkerId()) {
                    Matrix4x4 transformationM = marker.transformation;
                    Debug.Log("transformationM " + transformationM.ToString());

                    Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
                    Debug.Log("invertZM " + invertZM.ToString());
                    Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
                    Debug.Log("invertYM " + invertYM.ToString());

                    GameObject ARGameObject = settings.getARGameObject();
                    if (ARGameObject != null) {
                        Matrix4x4 ARM = ARGameObject.transform.localToWorldMatrix * invertZM * transformationM.inverse * invertYM;
                        Debug.Log("ARM " + ARM.ToString());
                        ARGameObject.SetActive(true);
                        ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                    }
                }
            }
        }
    } else {
        // Place each AR object at its marker's pose relative to the camera.
        List<Marker> findMarkers = markerDetector.getFindMarkers();
        for (int i = 0; i < findMarkers.Count; i++) {
            Marker marker = findMarkers[i];
            foreach (MarkerSettings settings in markerSettings) {
                if (marker.id == settings.getMarkerId()) {
                    Matrix4x4 transformationM = marker.transformation;
                    Debug.Log("transformationM " + transformationM.ToString());

                    Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
                    Debug.Log("invertYM " + invertYM.ToString());
                    Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
                    Debug.Log("invertZM " + invertZM.ToString());

                    Matrix4x4 ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
                    Debug.Log("ARM " + ARM.ToString());

                    GameObject ARGameObject = settings.getARGameObject();
                    if (ARGameObject != null) {
                        ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                        ARGameObject.SetActive(true);
                    }
                }
            }
        }
    }

    Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(imgMat, texture);
    gameObject.GetComponent<Renderer>().material.mainTexture = texture;
}
public void ProcessImage(IVideoCapture captureDevice, byte[] imagePtr) {
    if (captureDevice.Format != ImageFormat.B8G8R8A8_32)
        throw new MarkerException("Only ImageFormat.B8G8R8A8_32 format is acceptable for NyARToolkitTracker");

    // Initialize the detector right before the first image is processed.
    if (!started) {
        multiDetector = new MarkerDetector(param, codes.ToArray(), pattSizes.ToArray(), codes.Count, raster.getBufferType());
        multiDetector.setContinueMode(continuousMode);
        started = true;
    }

    raster.SetBuffer(imagePtr);
    UpdateMarkerTransforms();
}
/// <summary>
/// Associates a marker with an identifier so that the identifier can be used to find this
/// marker after processing the image.
/// </summary>
/// <param name="markerConfigs">A set of parameters that identifies a marker. (e.g., for
/// ARTag, this parameter would be the marker array name or marker ID)</param>
/// <returns>An identifier for this marker object</returns>
public Object AssociateMarker(params Object[] markerConfigs) {
    // Make sure we are initialized.
    if (!initialized)
        throw new MarkerException("ARToolkitTracker is not initialized. Call InitTracker(...)");

    if (!(markerConfigs.Length == 2 || markerConfigs.Length == 5))
        throw new MarkerException(GetAssocMarkerUsage());

    MarkerInfo markerInfo = new MarkerInfo();
    if (markerConfigs.Length == 2) {
        string arrayName = "";
        ComputationMethod method = ComputationMethod.Average;
        try {
            arrayName = (String)markerConfigs[0];
            method = (ComputationMethod)markerConfigs[1];
        } catch (Exception) {
            throw new MarkerException(GetAssocMarkerUsage());
        }

        ParseArray(arrayName, ref markerInfo);
        markerInfo.Method = method;
    } else {
        int pattWidth = 0, pattHeight = 0;
        float pattSize = 0, conf = 0;
        String pattName = "";
        try {
            pattName = (String)markerConfigs[0];
            pattWidth = (int)markerConfigs[1];
            pattHeight = (int)markerConfigs[2];
            pattSize = (float)markerConfigs[3];
            conf = (float)markerConfigs[4];
        } catch (Exception) {
            throw new MarkerException(GetAssocMarkerUsage());
        }

        NyARCode code = new NyARCode(pattWidth, pattHeight);
        code.loadARPatt(new System.IO.StreamReader(TitleContainer.OpenStream(pattName)));
        codes.Add(code);
        pattSizes.Add(pattSize);

        PatternInfo info = new PatternInfo();
        info.ConfidenceThreshold = conf;
        int id = codes.Count - 1;
        markerInfo.PatternInfos.Add(id, info);
        markerInfo.RelativeTransforms.Add(id, Matrix.Identity);
        markerInfo.Method = ComputationMethod.Average;
        markerInfoMap.Add(id, markerInfo);
    }

    markerInfoList.Add(markerInfo);

    // Reinitialize the multi-marker detector if the programmer adds a new marker node
    // after the initialization phase.
    if (started) {
        multiDetector = new MarkerDetector(param, codes.ToArray(), pattSizes.ToArray(), codes.Count, raster.getBufferType());
        multiDetector.setContinueMode(continuousMode);
    }

    return markerInfo;
}
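The two accepted argument shapes are easiest to see in a call sketch. This is hypothetical usage: `tracker`, "ground.array", and "patt.hiro" are placeholder names, but the argument types match the casts performed above.

// Hypothetical calls against an initialized tracker instance.
// Two-argument form: a marker array name plus a computation method.
object arrayMarker = tracker.AssociateMarker("ground.array", ComputationMethod.Average);

// Five-argument form: pattern file, pattern width/height, physical size,
// and confidence threshold (int, int, float, float after the string).
object singleMarker = tracker.AssociateMarker("patt.hiro", 16, 16, 80f, 0.7f);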
/// <summary>
/// Raises the web cam texture to mat helper inited event.
/// </summary>
public void OnWebCamTextureToMatHelperInited() {
    Debug.Log("OnWebCamTextureToMatHelperInited");

    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();
    colors = new Color32[webCamTextureMat.cols() * webCamTextureMat.rows()];
    texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);

    gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    float width = gameObject.transform.localScale.x;
    float height = gameObject.transform.localScale.y;
    float imageScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageScale = (float)Screen.height / (float)Screen.width;
    } else {
        Camera.main.orthographicSize = height / 2;
    }

    gameObject.GetComponent<Renderer>().material.mainTexture = texture;

    // Set the camera parameters: focal length approximated by the larger image
    // dimension, principal point at the image center.
    int max_d = Mathf.Max(webCamTextureMat.rows(), webCamTextureMat.cols());
    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, max_d);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, webCamTextureMat.cols() / 2.0f);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, max_d);
    camMatrix.put(1, 2, webCamTextureMat.rows() / 2.0f);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);
    Debug.Log("camMatrix " + camMatrix.dump());

    distCoeffs = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    // Calibrate the camera.
    Size imageSize = new Size(webCamTextureMat.cols() * imageScale, webCamTextureMat.rows() * imageScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point();
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx[0]);
    Debug.Log("fovy " + fovy[0]);
    Debug.Log("focalLength " + focalLength[0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio[0]);

    // Adjust the Unity camera FOV.
    if (widthScale < heightScale) {
        ARCamera.fieldOfView = (float)fovx[0];
    } else {
        ARCamera.fieldOfView = (float)fovy[0];
    }

    MarkerDesign[] markerDesigns = new MarkerDesign[markerSettings.Length];
    for (int i = 0; i < markerDesigns.Length; i++) {
        markerDesigns[i] = markerSettings[i].markerDesign;
    }
    markerDetector = new MarkerDetector(camMatrix, distCoeffs, markerDesigns);

    invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    Debug.Log("invertYM " + invertYM.ToString());
    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    Debug.Log("invertZM " + invertZM.ToString());

    // If the web camera is front facing, flip the Mat.
    if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing) {
        webCamTextureToMatHelper.flipHorizontal = true;
    }
}
void Start() {
    Debug.Assert(offsetObject != null, "ARCamera: No offsetObject set.");
    Debug.Assert(playerObject != null, "ARCamera: No playerObject set.");
    markerDetector = new MarkerDetector();
}
/// <summary>
/// Raises the web cam texture to mat helper inited event.
/// </summary>
public void OnWebCamTextureToMatHelperInited() {
    Debug.Log("OnWebCamTextureToMatHelperInited");

    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();
    texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
    gameObject.GetComponent<Renderer>().material.mainTexture = texture;
    gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);

    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    float width = webCamTextureMat.width();
    float height = webCamTextureMat.height();
    float imageSizeScale = 1.0f;
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        imageSizeScale = (float)Screen.height / (float)Screen.width;
    } else {
        Camera.main.orthographicSize = height / 2;
    }

    // Set the camera parameters: focal length approximated by the larger image
    // dimension, principal point at the image center.
    int max_d = (int)Mathf.Max(width, height);
    double fx = max_d;
    double fy = max_d;
    double cx = width / 2.0f;
    double cy = height / 2.0f;
    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, fx);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, cx);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, fy);
    camMatrix.put(1, 2, cy);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);
    Debug.Log("camMatrix " + camMatrix.dump());

    distCoeffs = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    // Calibrate the camera.
    Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point(0, 0);
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx[0]);
    Debug.Log("fovy " + fovy[0]);
    Debug.Log("focalLength " + focalLength[0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio[0]);

    // Convert between the OpenCV and Unity FOV conventions.
    double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
    double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));
    Debug.Log("fovXScale " + fovXScale);
    Debug.Log("fovYScale " + fovYScale);

    // Adjust the Unity camera FOV.
    // https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
    if (widthScale < heightScale) {
        ARCamera.fieldOfView = (float)(fovx[0] * fovXScale);
    } else {
        ARCamera.fieldOfView = (float)(fovy[0] * fovYScale);
    }

    MarkerDesign[] markerDesigns = new MarkerDesign[markerSettings.Length];
    for (int i = 0; i < markerDesigns.Length; i++) {
        markerDesigns[i] = markerSettings[i].markerDesign;
    }
    markerDetector = new MarkerDetector(camMatrix, distCoeffs, markerDesigns);

    invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
    Debug.Log("invertYM " + invertYM.ToString());
    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
    Debug.Log("invertZM " + invertZM.ToString());

    // If the web camera is front facing, flip the Mat.
    if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing) {
        webCamTextureToMatHelper.flipHorizontal = true;
    }
}
// Use this for initialization
void Start() {
    gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
    Camera.main.orthographicSize = imgTexture.height / 2;

    Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);
    Utils.texture2DToMat(imgTexture, imgMat);
    Debug.Log("imgMat dst ToString " + imgMat.ToString());

    // Set the camera parameters: focal length approximated by the larger image
    // dimension, principal point at the image center.
    int max_d = Mathf.Max(imgMat.rows(), imgMat.cols());
    Mat camMatrix = new Mat(3, 3, CvType.CV_64FC1);
    camMatrix.put(0, 0, max_d);
    camMatrix.put(0, 1, 0);
    camMatrix.put(0, 2, imgMat.cols() / 2.0f);
    camMatrix.put(1, 0, 0);
    camMatrix.put(1, 1, max_d);
    camMatrix.put(1, 2, imgMat.rows() / 2.0f);
    camMatrix.put(2, 0, 0);
    camMatrix.put(2, 1, 0);
    camMatrix.put(2, 2, 1.0f);
    Debug.Log("camMatrix " + camMatrix.dump());

    MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);
    Debug.Log("distCoeffs " + distCoeffs.dump());

    // Calibrate the camera.
    Size imageSize = new Size(imgMat.cols(), imgMat.rows());
    double apertureWidth = 0;
    double apertureHeight = 0;
    double[] fovx = new double[1];
    double[] fovy = new double[1];
    double[] focalLength = new double[1];
    Point principalPoint = new Point();
    double[] aspectratio = new double[1];
    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
    Debug.Log("imageSize " + imageSize.ToString());
    Debug.Log("apertureWidth " + apertureWidth);
    Debug.Log("apertureHeight " + apertureHeight);
    Debug.Log("fovx " + fovx[0]);
    Debug.Log("fovy " + fovy[0]);
    Debug.Log("focalLength " + focalLength[0]);
    Debug.Log("principalPoint " + principalPoint.ToString());
    Debug.Log("aspectratio " + aspectratio[0]);

    // Adjust the Unity camera FOV.
    ARCamera.fieldOfView = (float)fovy[0];
    // ARCamera.projectionMatrix = ARCamera.projectionMatrix * Matrix4x4.Scale(new Vector3(-1, -1, 1));
    // gameObject.transform.localScale = new Vector3(-1 * gameObject.transform.localScale.x, -1 * gameObject.transform.localScale.y, 1);

    MarkerDetector markerDetector = new MarkerDetector(camMatrix, distCoeffs, markerDesign);
    markerDetector.processFrame(imgMat, 1);

    // Marker coordinate initial matrix.
    Matrix4x4 lookAtM = getLookAtMatrix(new Vector3(0, 0, 0), new Vector3(0, 0, 1), new Vector3(0, -1, 0));
    Debug.Log("lookAt " + lookAtM.ToString());

    // Marker-to-camera coordinate system conversion matrix.
    if (markerDetector.getTransformations().Count > 0) {
        Matrix4x4 transformationM = markerDetector.getTransformations()[0];
        Debug.Log("transformationM " + transformationM.ToString());

        // OpenGL-to-Unity coordinate system conversion matrix.
        // Per http://docs.unity3d.com/ScriptReference/Camera-worldToCameraMatrix.html, camera space
        // matches the OpenGL convention: the camera's forward is the negative Z axis. This is
        // different from Unity's convention, where forward is the positive Z axis.
        Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
        Debug.Log("invertZM " + invertZM.ToString());

        Matrix4x4 worldToCameraM = lookAtM * transformationM * invertZM;
        Debug.Log("worldToCameraM " + worldToCameraM.ToString());

        ARCamera.worldToCameraMatrix = worldToCameraM;
    } else {
        Debug.LogWarning("Marker is not detected");
    }

    Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(imgMat, texture);
    gameObject.GetComponent<Renderer>().material.mainTexture = texture;
}