Example #1
        public override void Draw(int cameraId, Aruco.Dictionary dictionary, Cv.Mat image)
        {
            base.Draw(cameraId, dictionary, image);

            if (DetectedMarkers[cameraId][dictionary] > 0)
            {
                // Draw all the detected markers
                if (arucoTracker.DrawDetectedMarkers)
                {
                    Aruco.DrawDetectedMarkers(image, MarkerCorners[cameraId][dictionary], MarkerIds[cameraId][dictionary]);
                }

                // Draw axes of detected tracked markers
                if (arucoTracker.DrawAxes && arucoCameraUndistortion != null && MarkerRvecs[cameraId][dictionary] != null)
                {
                    for (uint i = 0; i < DetectedMarkers[cameraId][dictionary]; i++)
                    {
                        ArucoObject foundArucoObject;
                        int         detectedMarkerHashCode = ArucoMarker.GetArucoHashCode(MarkerIds[cameraId][dictionary].At(i));
                        if (arucoTracker.ArucoObjects[dictionary].TryGetValue(detectedMarkerHashCode, out foundArucoObject))
                        {
                            Aruco.DrawAxis(image, arucoCameraUndistortion.RectifiedCameraMatrices[cameraId], arucoCameraUndistortion.UndistortedDistCoeffs[cameraId],
                                           MarkerRvecs[cameraId][dictionary].At(i), MarkerTvecs[cameraId][dictionary].At(i), estimatePoseMarkerLength);
                        }
                    }
                }
            }

            // Draw the rejected marker candidates
            if (arucoTracker.DrawRejectedCandidates && RejectedCandidateCorners[cameraId][dictionary].Size() > 0)
            {
                Aruco.DrawDetectedMarkers(image, RejectedCandidateCorners[cameraId][dictionary]);
            }
        }
            // ArucoCameraUndistortion methods

            /// <summary>
            /// Initializes the <see cref="RectifiedCameraMatrices"/> using the <see cref="PerspectiveFieldOfViews"/> values for perspective rectification
            /// or uses the recommended values: https://docs.opencv.org/3.3.1/dd/d12/tutorial_omnidir_calib_main.html. Initializes the
            /// <see cref="RectificationMatrices"/> to identity matrix.
            /// </summary>
            protected override void InitializeRectification()
            {
                for (int cameraId = 0; cameraId < CameraParameters.CameraNumber; cameraId++)
                {
                    float imageWidth  = CameraParameters.ImageWidths[cameraId];
                    float imageHeight = CameraParameters.ImageHeights[cameraId];

                    if (RectificationType == RectificationTypes.Perspective)
                    {
                        float cameraFocalLength = imageHeight / (2f * Mathf.Tan(0.5f * PerspectiveFieldOfViews[cameraId] * Mathf.Deg2Rad));
                        RectifiedCameraMatrices[cameraId] = new Cv.Mat(3, 3, Cv.Type.CV_64F, new double[9] {
                            cameraFocalLength, 0, imageWidth / 2,
                            0, cameraFocalLength, imageHeight / 2,
                            0, 0, 1
                        }).Clone();
                    }
                    else
                    {
                        // Uses the camera matrix recommended values: https://docs.opencv.org/3.3.1/dd/d12/tutorial_omnidir_calib_main.html
                        RectifiedCameraMatrices[cameraId] = new Cv.Mat(3, 3, Cv.Type.CV_64F, new double[9] {
                            imageWidth / 3.1415, 0, 0,
                            0, imageHeight / 3.1415, 0,
                            0, 0, 1
                        }).Clone();
                    }

                    RectificationMatrices[cameraId] = noRectificationMatrix;
                }
            }
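As a sanity check on the perspective branch above, the focal length comes from the standard pinhole relation f = h / (2 * tan(fov_v / 2)). A minimal, hypothetical verification of that formula (the values are illustrative only, not taken from the project):

            // Hypothetical values: a 1080 px high image with a 60° vertical field of view
            // gives f = 1080 / (2 * tan(30°)) ≈ 935.3 px, which would become the fx = fy
            // entries of the rectified camera matrix built by InitializeRectification().
            float imageHeight       = 1080f;
            float verticalFov       = 60f;
            float cameraFocalLength = imageHeight / (2f * Mathf.Tan(0.5f * verticalFov * Mathf.Deg2Rad));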
Example #3
            /// <summary>
            /// Detects the Aruco markers on the current images of the cameras and stores the results in the <see cref="MarkerCornersCurrentImage"/> and
            /// <see cref="MarkerIdsCurrentImage"/> properties.
            /// </summary>
            public virtual void DetectMarkers()
            {
                if (!IsConfigured)
                {
                    throw new Exception("Configure the calibration controller before detect markers.");
                }

                for (int cameraId = 0; cameraId < ArucoCamera.CameraNumber; cameraId++)
                {
                    Std.VectorInt           markerIds;
                    Std.VectorVectorPoint2f markerCorners, rejectedCandidateCorners;

                    Cv.Mat image = ArucoCamera.Images[cameraId];

                    Aruco.DetectMarkers(image, CalibrationBoard.Dictionary, out markerCorners, out markerIds, DetectorParameters, out rejectedCandidateCorners);

                    MarkerCornersCurrentImage[cameraId] = markerCorners;
                    MarkerIdsCurrentImage[cameraId]     = markerIds;

                    if (RefineMarkersDetection)
                    {
                        Aruco.RefineDetectedMarkers(image, CalibrationBoard.Board, MarkerCornersCurrentImage[cameraId], MarkerIdsCurrentImage[cameraId],
                                                    rejectedCandidateCorners);
                    }
                }
            }
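A minimal usage sketch for this method, assuming an already configured calibration controller exposed here as a hypothetical field named calibrationController:

            // Hypothetical caller: run a detection pass, then read back the per-camera results
            // that DetectMarkers() stored in MarkerIdsCurrentImage.
            calibrationController.DetectMarkers();
            for (int cameraId = 0; cameraId < calibrationController.ArucoCamera.CameraNumber; cameraId++)
            {
                int markerCount = (int)calibrationController.MarkerIdsCurrentImage[cameraId].Size();
                Debug.Log("Camera " + cameraId + ": " + markerCount + " markers detected");
            }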
Example #4
            // Methods

            /// <summary>
            /// Create the image and the image texture of the <see cref="ArucoObject"/>.
            /// </summary>
            public virtual void Create()
            {
                Cv.Mat image = null;
                ImageTexture = null;

                // In case of a marker
                ArucoMarker marker = ArucoObject as ArucoMarker;

                if (marker != null)
                {
                    marker.Dictionary.DrawMarker(marker.MarkerId, (int)marker.MarkerSideLength, out image, marker.MarkerBorderBits);
                }

                // In case of a grid board
                ArucoGridBoard arucoGridBoard = ArucoObject as ArucoGridBoard;

                if (arucoGridBoard != null)
                {
                    Aruco.GridBoard gridBoard = arucoGridBoard.Board as Aruco.GridBoard;
                    gridBoard.Draw(arucoGridBoard.ImageSize, out image, arucoGridBoard.MarginsSize, arucoGridBoard.MarkerBorderBits);
                }

                // In case of a charuco board
                ArucoCharucoBoard arucoCharucoBoard = ArucoObject as ArucoCharucoBoard;

                if (arucoCharucoBoard != null)
                {
                    Aruco.CharucoBoard charucoBoard = arucoCharucoBoard.Board as Aruco.CharucoBoard;
                    charucoBoard.Draw(arucoCharucoBoard.ImageSize, out image, arucoCharucoBoard.MarginsSize, arucoCharucoBoard.MarkerBorderBits);
                }

                // In case of a diamond
                ArucoDiamond diamond = ArucoObject as ArucoDiamond;

                if (diamond != null && diamond.Ids.Length == 4)
                {
                    Cv.Vec4i ids = new Cv.Vec4i();
                    for (int i = 0; i < diamond.Ids.Length; ++i)
                    {
                        ids.Set(i, diamond.Ids[i]);
                    }
                    Aruco.DrawCharucoDiamond(diamond.Dictionary, ids, (int)diamond.SquareSideLength, (int)diamond.MarkerSideLength, out image);
                }

                // Set the properties
                Image = image;
                if (Image != null)
                {
                    // Vertical flip to correctly display the image on the texture
                    int    verticalFlipCode = 0;
                    Cv.Mat imageForTexture  = Image.Clone();
                    Cv.Flip(imageForTexture, imageForTexture, verticalFlipCode);

                    // Load the image to the texture
                    int markerDataSize = (int)(Image.ElemSize() * Image.Total());
                    ImageTexture = new Texture2D(Image.Cols, Image.Rows, TextureFormat.RGB24, false);
                    ImageTexture.LoadRawTextureData(imageForTexture.DataIntPtr, markerDataSize);
                    ImageTexture.Apply();
                }
            }
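As a hedged follow-up, the generated texture can be shown in the scene; this assumes the creator is a MonoBehaviour on a GameObject that has a standard Unity Renderer (hypothetical usage, not part of the original class):

                // Hypothetical usage: display the generated image on the attached renderer.
                // Assumes Create() has already run and produced a non-null ImageTexture.
                var objectRenderer = GetComponent<Renderer>();
                if (objectRenderer != null && ImageTexture != null)
                {
                    objectRenderer.material.mainTexture = ImageTexture;
                }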
            // ArucoCameraController methods

            /// <summary>
            /// Checks that <see cref="CameraParameters"/> is set and that <see cref="CameraParameters.CameraNumber"/> and <see cref="ArucoCamera.CameraNumber"/>
            /// are equal.
            /// </summary>
            public override void Configure()
            {
                base.Configure();

                // Check properties
                if (CameraParameters == null)
                {
                    throw new Exception("CameraParameters must be set to undistort the ArucoCamera images.");
                }
                if (CameraParameters.CameraNumber != ArucoCamera.CameraNumber)
                {
                    throw new Exception("The number of cameras in CameraParameters must be equal to the number of cameras in ArucoCamera");
                }

                // Initialize properties
                RectifiedCameraMatrices       = new Cv.Mat[CameraParameters.CameraNumber];
                RectificationMatrices         = new Cv.Mat[CameraParameters.CameraNumber];
                UndistortedDistCoeffs         = new Cv.Mat[CameraParameters.CameraNumber];
                UndistortionRectificationMaps = new Cv.Mat[CameraParameters.CameraNumber][];
                for (int cameraId = 0; cameraId < CameraParameters.CameraNumber; cameraId++)
                {
                    UndistortedDistCoeffs[cameraId]         = noDistCoeffs;
                    UndistortionRectificationMaps[cameraId] = new Cv.Mat[undistortionCameraMapsNumber];
                }

                OnConfigured();
            }
Example #6
        /// <summary>
        /// Initializes the <see cref="Images"/>, <see cref="ImageDataSizes"/>, <see cref="ImageDatas"/>,
        /// <see cref="NextImages"/>, <see cref="NextImageTextures"/> and <see cref="NextImageDatas"/> properties from the
        /// <see cref="Textures"/> property.
        /// </summary>
        protected override void OnStarted()
        {
            for (int cameraId = 0; cameraId < CameraNumber; cameraId++)
            {
                for (int bufferId = 0; bufferId < buffersCount; bufferId++)
                {
                    imageBuffers[bufferId][cameraId] = new Cv.Mat(Textures[cameraId].height, Textures[cameraId].width,
                                                                  CvMatExtensions.ImageType(Textures[cameraId].format));
                }

                ImageDataSizes[cameraId] = (int)(Images[cameraId].ElemSize() * Images[cameraId].Total());
                ImageRatios[cameraId]    = Textures[cameraId].width / (float)Textures[cameraId].height;

                for (int bufferId = 0; bufferId < buffersCount; bufferId++)
                {
                    imageDataBuffers[bufferId][cameraId]      = new byte[ImageDataSizes[cameraId]];
                    imageBuffers[bufferId][cameraId].DataByte = imageDataBuffers[bufferId][cameraId];
                }

                imagesToTextures[cameraId] = new Cv.Mat(Textures[cameraId].height, Textures[cameraId].width,
                                                        CvMatExtensions.ImageType(Textures[cameraId].format));
                imagesToTextureDatas[cameraId]      = new byte[ImageDataSizes[cameraId]];
                imagesToTextures[cameraId].DataByte = imagesToTextureDatas[cameraId];
            }

            base.OnStarted();
        }
Example #7
            public override void Draw(int cameraId, Aruco.Dictionary dictionary, Cv.Mat image)
            {
                if (DetectedMarkers[cameraId][dictionary] > 0)
                {
                    // Draw all the detected markers
                    if (arucoTracker.DrawDetectedMarkers)
                    {
                        // TODO: draw only markers in ArucoObjects list + add option to draw all the detected markers
                        Aruco.DrawDetectedMarkers(image, MarkerCorners[cameraId][dictionary], MarkerIds[cameraId][dictionary]);
                    }

                    // Draw axes of detected tracked markers
                    if (arucoTracker.DrawAxes && cameraParameters != null && MarkerRvecs[cameraId][dictionary] != null)
                    {
                        for (uint i = 0; i < DetectedMarkers[cameraId][dictionary]; i++)
                        {
                            ArucoObject foundArucoObject;
                            int         detectedMarkerHashCode = ArucoMarker.GetArucoHashCode(MarkerIds[cameraId][dictionary].At(i));
                            if (arucoTracker.ArucoObjects[dictionary].TryGetValue(detectedMarkerHashCode, out foundArucoObject))
                            {
                                Aruco.DrawAxis(image, cameraParameters.CameraMatrices[cameraId], cameraParameters.DistCoeffs[cameraId],
                                               MarkerRvecs[cameraId][dictionary].At(i), MarkerTvecs[cameraId][dictionary].At(i), EstimatePoseMarkerLength);
                            }
                        }
                    }
                }

                // Draw the rejected marker candidates
                if (arucoTracker.DrawRejectedCandidates && RejectedCandidateCorners[cameraId][dictionary].Size() > 0)
                {
                    Aruco.DrawDetectedMarkers(image, RejectedCandidateCorners[cameraId][dictionary]);
                }
            }
Example #8
 /// <summary>
 /// Detects the ArUco objects for a camera on a custom image.
 /// </summary>
 /// <param name="cameraId">The id of the camera.</param>
 /// <param name="dictionary">The dictionary to use for the detection.</param>
 /// <param name="image">The image to use for the detection.</param>
 public virtual void Detect(int cameraId, Aruco.Dictionary dictionary, Cv.Mat image)
 {
     if (!IsActivated)
     {
         throw new Exception("Activate the tracker before detecting ArUco objects.");
     }
 }
            public override void Detect(int cameraId, Aruco.Dictionary dictionary, Cv.Mat image)
            {
                base.Detect(cameraId, dictionary, image);

                ArucoMarkerTracker markerTracker = arucoTracker.MarkerTracker;

                Std.VectorVectorPoint2f diamondCorners = null;
                Std.VectorVec4i         diamondIds     = null;

                if (markerTracker.DetectedMarkers[cameraId][dictionary] > 0)
                {
                    if (arucoCameraUndistortion == null)
                    {
                        Aruco.DetectCharucoDiamond(image, markerTracker.MarkerCorners[cameraId][dictionary], markerTracker.MarkerIds[cameraId][dictionary],
                                                   DetectSquareMarkerLengthRate, out diamondCorners, out diamondIds);
                    }
                    else
                    {
                        Aruco.DetectCharucoDiamond(image, markerTracker.MarkerCorners[cameraId][dictionary], markerTracker.MarkerIds[cameraId][dictionary],
                                                   DetectSquareMarkerLengthRate, out diamondCorners, out diamondIds, arucoCameraUndistortion.RectifiedCameraMatrices[cameraId],
                                                   arucoCameraUndistortion.UndistortedDistCoeffs[cameraId]);
                    }
                }

                DiamondCorners[cameraId][dictionary]   = diamondCorners;
                DiamondIds[cameraId][dictionary]       = diamondIds;
                DetectedDiamonds[cameraId][dictionary] = (diamondIds != null) ? (int)diamondIds.Size() : 0;
            }
Example #10
            /// <summary>
            /// Initializes the tracking, activates the trackers, subscribes to the <see cref="ArucoObjectsController{T}.ArucoObjectAdded"/> and
            /// <see cref="ArucoObjectsController{T}.ArucoObjectRemoved"/> events, and starts the tracking thread.
            /// </summary>
            public override void StartController()
            {
                base.StartController();

                // Initialize the tracking
                trackingImages           = new Cv.Mat[ArucoCamera.CameraNumber];
                trackingImagesData       = new byte[ArucoCamera.CameraNumber][];
                arucoCameraImageCopyData = new byte[ArucoCamera.CameraNumber][];
                for (int cameraId = 0; cameraId < ArucoCamera.CameraNumber; cameraId++)
                {
                    arucoCameraImageCopyData[cameraId] = new byte[ArucoCamera.ImageDataSizes[cameraId]];
                    trackingImagesData[cameraId]       = new byte[ArucoCamera.ImageDataSizes[cameraId]];

                    Texture2D imageTexture = ArucoCamera.ImageTextures[cameraId];
                    trackingImages[cameraId]          = new Cv.Mat(imageTexture.height, imageTexture.width, CvMatExtensions.ImageType(imageTexture.format));
                    trackingImages[cameraId].DataByte = trackingImagesData[cameraId];
                }

                // Activate the trackers
                MarkerTracker.Activate(this);
                foreach (var arucoObjectDictionary in ArucoObjects)
                {
                    foreach (var arucoObject in arucoObjectDictionary.Value)
                    {
                        ArucoObjectsController_ArucoObjectAdded(arucoObject.Value);
                    }
                }

                // Subscribes to ArucoObjectsController and ArucoCamera events
                ArucoObjectAdded   += ArucoObjectsController_ArucoObjectAdded;
                ArucoObjectRemoved += ArucoObjectsController_ArucoObjectRemoved;

                arucoCameraImagesUpdated   = false;
                ArucoCamera.ImagesUpdated += ArucoCamera_ImagesUpdated;

                // Start the tracking thread
                trackingThread = new Thread(() =>
                {
                    try
                    {
                        while (IsConfigured && IsStarted)
                        {
                            trackingMutex.WaitOne();
                            Track();
                            trackingMutex.ReleaseMutex();
                        }
                    }
                    catch (Exception e)
                    {
                        trackingException = e;
                        trackingMutex.ReleaseMutex();
                    }
                });
                trackingThread.Start();

                OnStarted();
            }
Example #11
            private void InitUndistortRectifyMap(int cameraId, Cv.Mat rotationMatrix, Cv.Mat newCameraMatrix)
            {
                // Init the undistort rectify maps
                if (UndistortionType == UndistortionType.Pinhole)
                {
                    Cv.InitUndistortRectifyMap(CameraParameters.CameraMatrices[cameraId], CameraParameters.DistCoeffs[cameraId], rotationMatrix,
                                               newCameraMatrix, Images[cameraId].Size, Cv.Type.CV_16SC2, out undistordedImageMaps[cameraId][0], out undistordedImageMaps[cameraId][1]);
                }
                else if (new[] { UndistortionType.OmnidirPerspective, UndistortionType.OmnidirCylindrical, UndistortionType.OmnidirLonglati,
                                 UndistortionType.OmnidirStereographic }.Contains(UndistortionType))
                {
                    Cv.Omnidir.Rectifify flags = Cv.Omnidir.Rectifify.Perspective;
                    if (UndistortionType == UndistortionType.OmnidirCylindrical)
                    {
                        flags = Cv.Omnidir.Rectifify.Cylindrical;
                    }
                    else if (UndistortionType == UndistortionType.OmnidirLonglati)
                    {
                        flags = Cv.Omnidir.Rectifify.Longlati;
                    }
                    else if (UndistortionType == UndistortionType.OmnidirStereographic)
                    {
                        flags = Cv.Omnidir.Rectifify.Stereographic;
                    }

                    // If there is no newCameraMatrix, initialize it with the recommended values
                    if (newCameraMatrix.Total() == 0)
                    {
                        double width = ImageTextures[cameraId].width, height = ImageTextures[cameraId].height;
                        if (flags == Cv.Omnidir.Rectifify.Perspective)
                        {
                            newCameraMatrix = new Cv.Mat(3, 3, Cv.Type.CV_64F, new double[9] {
                                width / 2, 0, width / 2, 0, height / 2, height / 2, 0, 0, 1
                            }).Clone();
                        }
                        else
                        {
                            newCameraMatrix = new Cv.Mat(3, 3, Cv.Type.CV_64F, new double[9] {
                                width / 3.1415, 0, 0, 0, height / 3.1415, 0, 0, 0, 1
                            }).Clone();
                        }
                    }

                    Cv.Omnidir.InitUndistortRectifyMap(CameraParameters.CameraMatrices[cameraId], CameraParameters.DistCoeffs[cameraId],
                                                       CameraParameters.OmnidirXis[cameraId], rotationMatrix, newCameraMatrix, Images[cameraId].Size, Cv.Type.CV_16SC2,
                                                       out undistordedImageMaps[cameraId][0], out undistordedImageMaps[cameraId][1], flags);
                }
                else
                {
                    throw new Exception("Unable to initialize the undistort rectify maps with this UndistortionType: " + UndistortionType);
                }

                // Update camera intrinsic parameters for the undistorted images
                CameraParameters.CameraMatrices[cameraId] = newCameraMatrix;
                CameraParameters.DistCoeffs[cameraId]     = new Cv.Mat();
            }
Example #12
            // Methods

            /// <summary>
            /// Configure the cameras and their properties.
            /// </summary>
            public virtual void Configure()
            {
                // Configure the flip codes to transfer images from Unity to OpenCV and vice-versa
                // The raw bytes from a Texture to a Mat and from a Mat to a Texture need to be vertically flipped to be in the correct orientation
                if (!flipHorizontallyImages && !flipVerticallyImages)
                {
                    preDetectflipCode  = 0; // Vertical flip
                    postDetectflipCode = 0;
                }
                else if (flipHorizontallyImages && !flipVerticallyImages)
                {
                    preDetectflipCode  = 0;
                    postDetectflipCode = -1; // Flip on both axes
                }
                else if (!flipHorizontallyImages && flipVerticallyImages)
                {
                    preDetectflipCode  = null; // Don't flip, texture image is already flipped
                    postDetectflipCode = 0;
                }
                else if (flipHorizontallyImages && flipVerticallyImages)
                {
                    preDetectflipCode  = null;
                    postDetectflipCode = -1;
                }

                // Initialize the properties and variables
                images         = new Cv.Mat[CameraNumber];
                ImageDatas     = new byte[CameraNumber][];
                ImageDataSizes = new int[CameraNumber];
                ImageCameras   = new Camera[CameraNumber];
                ImageTextures  = new Texture2D[CameraNumber];

                cameraMatricesSave = new Cv.Mat[CameraNumber];
                distCoeffsSave     = new Cv.Mat[CameraNumber];

                if (CameraParameters != null)
                {
                    undistordedImageMaps = new Cv.Mat[CameraNumber][];
                    for (int cameraId = 0; cameraId < CameraNumber; cameraId++)
                    {
                        undistordedImageMaps[cameraId] = new Cv.Mat[2]; // map1 and map2
                    }
                }

                // Update state
                IsConfigured = true;
                OnConfigured();

                // AutoStart
                if (AutoStart)
                {
                    StartCameras();
                }
            }
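For reference, the flip codes chosen above follow OpenCV's Cv.Flip convention; a small self-contained, hypothetical illustration:

                // Cv.Flip flip code semantics used by the pre/post detection flips above:
                //   0  -> flip around the x-axis (vertical flip)
                //   1  -> flip around the y-axis (horizontal flip)
                //   -1 -> flip around both axes
                // Hypothetical demonstration: a vertical flip swaps the rows of a 2x2 matrix.
                Cv.Mat testImage = new Cv.Mat(2, 2, Cv.Type.CV_64F, new double[4] { 1, 2, 3, 4 });
                Cv.Flip(testImage, testImage, 0); // rows { 1, 2 }, { 3, 4 } become { 3, 4 }, { 1, 2 }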
Example #13
 /// <summary>
 /// Initializes the <see cref="Images"/>, <see cref="ImageDataSizes"/> and <see cref="ImageDatas"/> properties from the
 /// <see cref="ImageTextures"/> property.
 /// </summary>
 protected virtual void InitializeImages()
 {
     for (int cameraId = 0; cameraId < CameraNumber; cameraId++)
     {
         Images[cameraId]          = new Cv.Mat(ImageTextures[cameraId].height, ImageTextures[cameraId].width, CvMatExtensions.ImageType(ImageTextures[cameraId].format));
         ImageDataSizes[cameraId]  = (int)(Images[cameraId].ElemSize() * Images[cameraId].Total());
         ImageDatas[cameraId]      = new byte[ImageDataSizes[cameraId]];
         Images[cameraId].DataByte = ImageDatas[cameraId];
         ImageRatios[cameraId]     = ImageTextures[cameraId].width / (float)ImageTextures[cameraId].height;
     }
 }
Example #14
 public override void Draw(int cameraId, Aruco.Dictionary dictionary, Cv.Mat image)
 {
     foreach (var arucoGridBoard in arucoTracker.GetArucoObjects <ArucoGridBoard>(dictionary))
     {
         if (arucoTracker.DrawAxes && cameraParameters != null && arucoGridBoard.Rvec != null)
         {
             Aruco.DrawAxis(image, cameraParameters.CameraMatrices[cameraId], cameraParameters.DistCoeffs[cameraId],
                            arucoGridBoard.Rvec, arucoGridBoard.Tvec, arucoGridBoard.AxisLength);
         }
     }
 }
Example #15
            public override void Detect(int cameraId, Aruco.Dictionary dictionary, Cv.Mat image)
            {
                Std.VectorVectorPoint2f markerCorners, rejectedCandidateCorners;
                Std.VectorInt           markerIds;

                Aruco.DetectMarkers(image, dictionary, out markerCorners, out markerIds, arucoTracker.DetectorParameters, out rejectedCandidateCorners);

                DetectedMarkers[cameraId][dictionary]          = (int)markerIds.Size();
                MarkerCorners[cameraId][dictionary]            = markerCorners;
                MarkerIds[cameraId][dictionary]                = markerIds;
                RejectedCandidateCorners[cameraId][dictionary] = rejectedCandidateCorners;
            }
            // MonoBehaviour methods

            /// <summary>
            /// Initializes the properties.
            /// </summary>
            protected override void Start()
            {
                noRectificationMatrix = new Cv.Mat();
                noDistCoeffs          = new Cv.Mat();

                if (CameraParameters == null && CameraParametersController != null)
                {
                    CameraParameters = CameraParametersController.CameraParameters;
                }

                base.Start();
            }
    public override void Draw(int cameraId, Aruco.Dictionary dictionary, Cv.Mat image)
    {
      base.Draw(cameraId, dictionary, image);

      foreach (var arucoGridBoard in arucoTracker.GetArucoObjects<ArucoGridBoard>(dictionary))
      {
        if (arucoTracker.DrawAxes && arucoCameraUndistortion != null && arucoGridBoard.Rvec != null)
        {
          Aruco.DrawAxis(image, arucoCameraUndistortion.RectifiedCameraMatrices[cameraId], arucoCameraUndistortion.UndistortedDistCoeffs[cameraId],
            arucoGridBoard.Rvec, arucoGridBoard.Tvec, arucoGridBoard.AxisLength);
        }
      }
    }
Example #18
            // ArucoObjectTracker methods

            public override void Detect(int cameraId, Aruco.Dictionary dictionary, Cv.Mat image)
            {
                ArucoMarkerTracker markerTracker = arucoTracker.MarkerTracker;

                if (arucoTracker.RefineDetectedMarkers && arucoTracker.MarkerTracker.DetectedMarkers[cameraId][dictionary] > 0)
                {
                    foreach (var arucoBoard in arucoTracker.GetArucoObjects <ArucoGridBoard>(dictionary))
                    {
                        Aruco.RefineDetectedMarkers(image, arucoBoard.Board, markerTracker.MarkerCorners[cameraId][dictionary],
                                                    markerTracker.MarkerIds[cameraId][dictionary], markerTracker.RejectedCandidateCorners[cameraId][dictionary]);
                        markerTracker.DetectedMarkers[cameraId][dictionary] = (int)markerTracker.MarkerIds[cameraId][dictionary].Size();
                    }
                }
            }
        /// <summary>
        /// Initialize the properties.
        /// </summary>
        /// <param name="camerasNumber">The number of camera in the camera system. Must be equal to the number of cameras of the related
        /// <see cref="ArucoCamera"/>.</param>
        public ArucoCameraParameters(int camerasNumber)
        {
            CalibrationDateTime = DateTime.Now;
            CameraNumber        = camerasNumber;

            ImageHeights         = new int[CameraNumber];
            ImageWidths          = new int[CameraNumber];
            ReprojectionErrors   = new double[CameraNumber];
            CameraMatrices       = new Cv.Mat[CameraNumber];
            CameraMatricesValues = new double[CameraNumber][][];
            DistCoeffs           = new Cv.Mat[CameraNumber];
            DistCoeffsValues     = new double[CameraNumber][][];
            OmnidirXis           = new Cv.Mat[CameraNumber];
            OmnidirXisValues     = new double[CameraNumber][][];
        }
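A hedged construction sketch for this class (a hypothetical single-camera setup that only fills in the image size fields initialized above):

        // Hypothetical usage: parameters for a one-camera system.
        ArucoCameraParameters cameraParameters = new ArucoCameraParameters(1);
        cameraParameters.ImageWidths[0]  = 1280;
        cameraParameters.ImageHeights[0] = 720;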
Example #20
            // ArucoObjectDetector methods

            /// <summary>
            /// Initialize the properties, the ArUco object list, and the tracking images.
            /// </summary>
            protected override void PreConfigure()
            {
                trackingImages           = new Cv.Mat[ArucoCamera.CameraNumber];
                trackingImagesData       = new byte[ArucoCamera.CameraNumber][];
                arucoCameraImageCopyData = new byte[ArucoCamera.CameraNumber][];
                for (int cameraId = 0; cameraId < ArucoCamera.CameraNumber; cameraId++)
                {
                    arucoCameraImageCopyData[cameraId] = new byte[ArucoCamera.ImageDataSizes[cameraId]];
                    trackingImagesData[cameraId]       = new byte[ArucoCamera.ImageDataSizes[cameraId]];

                    Texture2D imageTexture = ArucoCamera.ImageTextures[cameraId];
                    trackingImages[cameraId]          = new Cv.Mat(imageTexture.height, imageTexture.width, ArucoCamera.ImageType(imageTexture.format));
                    trackingImages[cameraId].DataByte = trackingImagesData[cameraId];
                }

                MarkerTracker.Activate(this);
            }
Example #21
        // Methods

        /// <summary>
        /// Creates <see cref="Image"/> and <see cref="ImageTexture"/> from <see cref="ArucoObject"/>.
        /// </summary>
        public virtual void CreateImage()
        {
            Image = ArucoObject.Draw();

            if (Image != null)
            {
                // Vertical flip to correctly display the image on the texture
                int    verticalFlipCode = 0;
                Cv.Mat imageForTexture  = Image.Clone();
                Cv.Flip(imageForTexture, imageForTexture, verticalFlipCode);

                // Load the image to the texture
                int markerDataSize = (int)(Image.ElemSize() * Image.Total());
                ImageTexture = new Texture2D(Image.Cols, Image.Rows, TextureFormat.RGB24, false);
                ImageTexture.LoadRawTextureData(imageForTexture.DataIntPtr, markerDataSize);
                ImageTexture.Apply();
            }
        }
Example #22
            public override void Draw(int cameraId, Aruco.Dictionary dictionary, Cv.Mat image)
            {
                foreach (var arucoCharucoBoard in arucoTracker.GetArucoObjects <ArucoCharucoBoard>(dictionary))
                {
                    if (arucoCharucoBoard.DetectedIds.Size() > 0)
                    {
                        if (arucoTracker.DrawDetectedCharucoMarkers)
                        {
                            Aruco.DrawDetectedCornersCharuco(image, arucoCharucoBoard.DetectedCorners, arucoCharucoBoard.DetectedIds);
                        }

                        if (arucoTracker.DrawAxes && cameraParameters != null && arucoCharucoBoard.Rvec != null)
                        {
                            Aruco.DrawAxis(image, cameraParameters.CameraMatrices[cameraId], cameraParameters.DistCoeffs[cameraId],
                                           arucoCharucoBoard.Rvec, arucoCharucoBoard.Tvec, arucoCharucoBoard.AxisLength);
                        }
                    }
                }
            }
Example #23
            /// <summary>
            /// Execute the <see cref="Started"/> action.
            /// </summary>
            protected void OnStarted()
            {
                // Save the CameraParameters property as the rectification in the undistortion process may alter it
                if (CameraParameters != null)
                {
                    for (int cameraId = 0; cameraId < CameraNumber; cameraId++)
                    {
                        cameraMatricesSave[cameraId] = new Cv.Mat(CameraParameters.CameraMatrices[cameraId].CppPtr);
                        CameraParameters.CameraMatrices[cameraId].DeleteResponsibility = Utility.DeleteResponsibility.False;

                        distCoeffsSave[cameraId] = new Cv.Mat(CameraParameters.DistCoeffs[cameraId].CppPtr);
                        CameraParameters.DistCoeffs[cameraId].DeleteResponsibility = Utility.DeleteResponsibility.False;
                    }
                }

                InitializeMatImages();

                IsStarted = true;
                Started();
            }
Example #24
            // Constructor

            public ArucoCameraSeparateThread(IArucoCamera arucoCamera, Action imagesUpdated, Action threadWork, Action threadException)
            {
                this.arucoCamera     = arucoCamera;
                this.imagesUpdated   = imagesUpdated;
                this.threadWork      = threadWork;
                this.threadException = threadException;

                Images     = new Cv.Mat[arucoCamera.CameraNumber];
                ImagesData = new byte[arucoCamera.CameraNumber][];
                arucoCameraImageCopyData = new byte[arucoCamera.CameraNumber][];
                for (int cameraId = 0; cameraId < arucoCamera.CameraNumber; cameraId++)
                {
                    ImagesData[cameraId] = new byte[arucoCamera.ImageDataSizes[cameraId]];
                    arucoCameraImageCopyData[cameraId] = new byte[arucoCamera.ImageDataSizes[cameraId]];

                    Images[cameraId] = new Cv.Mat(arucoCamera.ImageTextures[cameraId].height, arucoCamera.ImageTextures[cameraId].width,
                                                  CvMatExtensions.ImageType(arucoCamera.ImageTextures[cameraId].format));
                    Images[cameraId].DataByte = ImagesData[cameraId];
                }
            }
Example #25
        // Constructor

        public ArucoCameraSeparateThread(IArucoCamera arucoCamera, Action <Cv.Mat[]> threadWork)
        {
            this.arucoCamera = arucoCamera;
            this.threadWork  = threadWork;
            CopyBackImages   = false;

            for (int bufferId = 0; bufferId < buffersCount; bufferId++)
            {
                imageBuffers[bufferId]     = new Cv.Mat[arucoCamera.CameraNumber];
                imageDataBuffers[bufferId] = new byte[arucoCamera.CameraNumber][];

                for (int cameraId = 0; cameraId < arucoCamera.CameraNumber; cameraId++)
                {
                    imageBuffers[bufferId][cameraId] = new Cv.Mat(arucoCamera.Textures[cameraId].height, arucoCamera.Textures[cameraId].width,
                                                                  CvMatExtensions.ImageType(arucoCamera.Textures[cameraId].format));

                    imageDataBuffers[bufferId][cameraId]      = new byte[arucoCamera.ImageDataSizes[cameraId]];
                    imageBuffers[bufferId][cameraId].DataByte = imageDataBuffers[bufferId][cameraId];
                }
            }
        }
Example #26
            /// <summary>
            /// Returns a property created from a <paramref name="propertyType"/> type and an array of values.
            /// </summary>
            /// <param name="propertyType">The type of the property.</param>
            /// <param name="propertyValues">The content of the property.</param>
            /// <returns>The property.</returns>
            internal static Cv.Mat[] CreateProperty(Cv.Type propertyType, double[][][] propertyValues)
            {
                int cameraNumber = propertyValues.Length;

                Cv.Mat[] property = new Cv.Mat[cameraNumber];
                for (int cameraId = 0; cameraId < cameraNumber; cameraId++)
                {
                    int rows = propertyValues[cameraId].Length,
                        cols = (rows > 0) ? propertyValues[cameraId][0].Length : 0;
                    property[cameraId] = new Cv.Mat(rows, cols, propertyType);
                    for (int i = 0; i < rows; i++)
                    {
                        for (int j = 0; j < cols; j++)
                        {
                            property[cameraId].AtDouble(i, j, propertyValues[cameraId][i][j]);
                        }
                    }
                }

                return(property);
            }
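A minimal sketch of how this helper might be exercised; the values are hypothetical, and Cv.Type.CV_64F is chosen to match the double storage written by AtDouble:

                // Hypothetical usage: rebuild a single 3x3 camera matrix from serialized values.
                double[][][] cameraMatricesValues = new double[1][][]
                {
                    new double[3][]
                    {
                        new double[3] { 800, 0, 320 },
                        new double[3] { 0, 800, 240 },
                        new double[3] { 0, 0, 1 }
                    }
                };
                Cv.Mat[] cameraMatrices = CreateProperty(Cv.Type.CV_64F, cameraMatricesValues);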
Example #27
            public override void Draw(int cameraId, Aruco.Dictionary dictionary, Cv.Mat image)
            {
                if (DetectedDiamonds[cameraId][dictionary] > 0)
                {
                    // Draw detected diamonds
                    if (arucoTracker.DrawDetectedDiamonds)
                    {
                        Aruco.DrawDetectedDiamonds(image, DiamondCorners[cameraId][dictionary], DiamondIds[cameraId][dictionary]);
                    }

                    // Draw axes of detected diamonds
                    if (arucoTracker.DrawAxes && cameraParameters != null && DiamondRvecs[cameraId][dictionary] != null)
                    {
                        for (uint i = 0; i < DetectedDiamonds[cameraId][dictionary]; i++)
                        {
                            Aruco.DrawAxis(image, cameraParameters.CameraMatrices[cameraId], cameraParameters.DistCoeffs[cameraId],
                                           DiamondRvecs[cameraId][dictionary].At(i), DiamondTvecs[cameraId][dictionary].At(i), DrawAxisLength);
                        }
                    }
                }
            }
Example #28
        // ConfigurableController methods

        /// <summary>
        /// Configures the properties.
        /// </summary>
        protected override void Configuring()
        {
            base.Configuring();

            if (CameraNumber <= 0)
            {
                throw new Exception("It must have at least one camera.");
            }

            Textures       = new Texture2D[CameraNumber];
            ImageDataSizes = new int[CameraNumber];
            ImageRatios    = new float[CameraNumber];

            imagesToTextures     = new Cv.Mat[CameraNumber];
            imagesToTextureDatas = new byte[CameraNumber][];

            for (int bufferId = 0; bufferId < buffersCount; bufferId++)
            {
                imageBuffers[bufferId]     = new Cv.Mat[CameraNumber];
                imageDataBuffers[bufferId] = new byte[CameraNumber][];
            }

            if (!flipHorizontallyImages && !flipVerticallyImages)
            {
                imagesFlipCode = Cv.verticalFlipCode;
            }
            else if (flipHorizontallyImages && !flipVerticallyImages)
            {
                imagesFlipCode = Cv.bothAxesFlipCode;
            }
            else if (!flipHorizontallyImages && flipVerticallyImages)
            {
                imagesFlipCode = dontFlipCode; // Don't flip because the image textures are already vertically flipped
            }
            else if (flipHorizontallyImages && flipVerticallyImages)
            {
                imagesFlipCode = Cv.horizontalFlipCode; // Image textures are already vertically flipped
            }
        }
Example #29
            // ArucoObjectTracker methods

            public override void Detect(int cameraId, Aruco.Dictionary dictionary, Cv.Mat image)
            {
                base.Detect(cameraId, dictionary, image);

                ArucoMarkerTracker markerTracker = arucoTracker.MarkerTracker;

                foreach (var arucoCharucoBoard in arucoTracker.GetArucoObjects <ArucoCharucoBoard>(dictionary))
                {
                    if (arucoTracker.RefineDetectedMarkers)
                    {
                        Aruco.RefineDetectedMarkers(image, arucoCharucoBoard.Board, markerTracker.MarkerCorners[cameraId][dictionary],
                                                    markerTracker.MarkerIds[cameraId][dictionary], markerTracker.RejectedCandidateCorners[cameraId][dictionary]);
                        markerTracker.DetectedMarkers[cameraId][dictionary] = (int)markerTracker.MarkerIds[cameraId][dictionary].Size();
                    }

                    Std.VectorPoint2f charucoCorners = null;
                    Std.VectorInt     charucoIds     = null;

                    if (markerTracker.DetectedMarkers[cameraId][dictionary] > 0)
                    {
                        if (arucoCameraUndistortion == null)
                        {
                            Aruco.InterpolateCornersCharuco(markerTracker.MarkerCorners[cameraId][dictionary],
                                                            markerTracker.MarkerIds[cameraId][dictionary], arucoCamera.Images[cameraId],
                                                            (Aruco.CharucoBoard)arucoCharucoBoard.Board, out charucoCorners, out charucoIds);
                        }
                        else
                        {
                            Aruco.InterpolateCornersCharuco(markerTracker.MarkerCorners[cameraId][dictionary],
                                                            markerTracker.MarkerIds[cameraId][dictionary], arucoCamera.Images[cameraId],
                                                            (Aruco.CharucoBoard)arucoCharucoBoard.Board, out charucoCorners, out charucoIds, arucoCameraUndistortion.RectifiedCameraMatrices[cameraId],
                                                            arucoCameraUndistortion.UndistortedDistCoeffs[cameraId]);
                        }
                    }

                    arucoCharucoBoard.DetectedCorners = charucoCorners;
                    arucoCharucoBoard.DetectedIds     = charucoIds;
                }
            }
Example #30
            // Methods

            /// <summary>
            /// Configures all the image-related properties, calls the <see cref="Configured"/> event, and calls <see cref="StartController"/> if
            /// <see cref="AutoStart"/> is true.
            /// </summary>
            protected override void OnConfigured()
            {
                if (CameraNumber <= 0)
                {
                    throw new Exception("It must have at least one camera.");
                }

                // Initialize the properties and variables
                Images         = new Cv.Mat[CameraNumber];
                ImageDatas     = new byte[CameraNumber][];
                ImageDataSizes = new int[CameraNumber];
                ImageRatios    = new float[CameraNumber];
                ImageTextures  = new Texture2D[CameraNumber];

                // Configure the flip codes to transfer images from Unity to OpenCV and vice-versa
                // The raw bytes from a Texture to a Mat and from a Mat to a Texture need to be vertically flipped to be in the correct orientation
                if (!flipHorizontallyImages && !flipVerticallyImages)
                {
                    preDetectflipCode = postDetectflipCode = Cv.verticalFlipCode;
                }
                else if (flipHorizontallyImages && !flipVerticallyImages)
                {
                    preDetectflipCode  = Cv.verticalFlipCode;
                    postDetectflipCode = Cv.bothAxesFlipCode;
                }
                else if (!flipHorizontallyImages && flipVerticallyImages)
                {
                    preDetectflipCode  = dontFlipCode; // Don't flip because texture image is already vertically flipped
                    postDetectflipCode = Cv.verticalFlipCode;
                }
                else if (flipHorizontallyImages && flipVerticallyImages)
                {
                    preDetectflipCode  = dontFlipCode; // Don't flip because texture image is already vertically flipped
                    postDetectflipCode = Cv.bothAxesFlipCode;
                }

                base.OnConfigured();
            }