コード例 #1
0
        /// <summary>
        /// Initializes the form, loads the default ArUco detector parameters and
        /// fills the camera combo box with every attached video input device.
        /// </summary>
        /// <exception cref="Exception">Thrown when no video input device is present.</exception>
        public AforgeForm()
        {
            InitializeComponent();

            _detectorParameters = DetectorParameters.GetDefault();

            videoDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);

            if (videoDevices.Count == 0)
            {
                // Same exception type as before, but now with an actionable message
                // instead of a bare parameterless Exception.
                throw new Exception("No video input devices were found.");
            }

            // Display entries as "1 : <name>", "2 : <name>", ... (1-based for the user).
            for (int i = 0; i < videoDevices.Count; i++)
            {
                string cameraName = (i + 1) + " : " + videoDevices[i].Name;
                comboBoxCameraList.Items.Add(cameraName);
            }
        }
コード例 #2
0
        /// <summary>
        /// Detects ArUco markers in the given bitmap and returns one result per
        /// detected marker, carrying the marker id and its four corner points.
        /// </summary>
        /// <param name="image">Source bitmap to search for markers.</param>
        /// <returns>Array of detected markers (empty when none are found).</returns>
        public MarkerResult[] FindMarkers(Bitmap image)
        {
            var cvImage = new Image <Bgr, byte>(image);

            // Dictionary constructed from the default predefined-dictionary value.
            var dictionaryName = new Dictionary.PredefinedDictionaryName();
            var dictionary     = new Dictionary(dictionaryName);

            var corners    = new VectorOfVectorOfPointF();
            var ids        = new VectorOfInt();
            var rejected   = new VectorOfVectorOfPointF();
            var parameters = DetectorParameters.GetDefault();

            ArucoInvoke.DetectMarkers(cvImage, dictionary, corners, ids, parameters, rejected);

            int markerCount = corners.Size;
            var results     = new MarkerResult[markerCount];

            for (int markerIndex = 0; markerIndex < markerCount; markerIndex++)
            {
                // Each marker has exactly four corners.
                var quad = new Vector2[4];

                for (int cornerIndex = 0; cornerIndex < 4; cornerIndex++)
                {
                    var point = corners[markerIndex][cornerIndex];
                    quad[cornerIndex] = new Vector2(point.X, point.Y);
                }

                results[markerIndex] = new MarkerResult(ids[markerIndex], quad);
            }

            return results;
        }
コード例 #3
0
        /// <summary>
        /// Verifies that <c>DetectorParameters.Create()</c> returns the expected
        /// default value for every ArUco detection parameter, including the
        /// AprilTag-specific settings.
        /// </summary>
        public void CreateDetectorParameters()
        {
            var param = DetectorParameters.Create();

            // Adaptive thresholding defaults.
            Assert.Equal(3, param.AdaptiveThreshWinSizeMin);
            Assert.Equal(23, param.AdaptiveThreshWinSizeMax);
            Assert.Equal(10, param.AdaptiveThreshWinSizeStep);
            Assert.Equal(7, param.AdaptiveThreshConstant);
            // Contour filtering defaults (floating-point values compared to 3 decimal places).
            Assert.Equal(0.03, param.MinMarkerPerimeterRate, 3);
            Assert.Equal(4, param.MaxMarkerPerimeterRate, 3);
            Assert.Equal(0.03, param.PolygonalApproxAccuracyRate, 3);
            Assert.Equal(0.05, param.MinCornerDistanceRate, 3);
            Assert.Equal(3, param.MinDistanceToBorder);
            Assert.Equal(0.05, param.MinMarkerDistanceRate, 3);
            // Corner refinement defaults (disabled by default).
            Assert.Equal(CornerRefineMethod.None, param.CornerRefinementMethod);
            Assert.Equal(5, param.CornerRefinementWinSize);
            Assert.Equal(30, param.CornerRefinementMaxIterations);
            Assert.Equal(0.1, param.CornerRefinementMinAccuracy, 3);
            // Marker bit extraction / error-correction defaults.
            Assert.Equal(1, param.MarkerBorderBits);
            Assert.Equal(4, param.PerspectiveRemovePixelPerCell);
            Assert.Equal(0.13, param.PerspectiveRemoveIgnoredMarginPerCell, 3);
            Assert.Equal(0.35, param.MaxErroneousBitsInBorderRate, 3);
            Assert.Equal(5.0, param.MinOtsuStdDev, 3);
            Assert.Equal(0.6, param.ErrorCorrectionRate, 3);
            // AprilTag-specific defaults.
            Assert.Equal(0f, param.AprilTagQuadDecimate, 3);
            Assert.Equal(0f, param.AprilTagQuadSigma, 3);
            Assert.Equal(5, param.AprilTagMinClusterPixels);
            Assert.Equal(10, param.AprilTagMaxNmaxima);
            Assert.Equal(0.175f, param.AprilTagCriticalRad, 3);
            Assert.Equal(10f, param.AprilTagMaxLineFitMse, 3);
            Assert.False(param.AprilTagDeglitch);
            Assert.Equal(5, param.AprilTagMinWhiteBlackDiff);
            Assert.False(param.DetectInvertedMarker);
        }
コード例 #4
0
		/// <summary>
		/// Detects ArUco markers in <c>this.texture</c> once at startup and shows
		/// the annotated image on the attached RawImage component.
		/// </summary>
		void Start () {
			// Create default parameters for detection
			DetectorParameters detectorParameters = DetectorParameters.Create();

			// Dictionary holds set of all available markers
			Dictionary dictionary = CvAruco.GetPredefinedDictionary (PredefinedDictionaryName.Dict6X6_250);

			// Variables to hold results
			Point2f[][] corners;
			int[] ids;
			Point2f[][] rejectedImgPoints;

			// Mats wrap native OpenCV memory (IDisposable); release them once the
			// output texture has been built instead of leaking them every Start().
			using (Mat mat = Unity.TextureToMat (this.texture))
			using (Mat grayMat = new Mat ()) {
				// Convert image to grayscale
				Cv2.CvtColor (mat, grayMat, ColorConversionCodes.BGR2GRAY);

				// Detect and draw markers
				CvAruco.DetectMarkers (grayMat, dictionary, out corners, out ids, detectorParameters, out rejectedImgPoints);
				CvAruco.DrawDetectedMarkers (mat, corners, ids);

				// Create Unity output texture with detected markers
				Texture2D outputTexture = Unity.MatToTexture (mat);

				// Set texture to see the result
				RawImage rawImage = gameObject.GetComponent<RawImage> ();
				rawImage.texture = outputTexture;
			}
		}
コード例 #5
0
 /// <summary>
 /// Configures the ZBar scanner for QR-code-only detection and builds the
 /// ArUco detector parameters used while processing frames.
 /// </summary>
 public FrameProcessor()
 {
     // Prepare the barcode scanner.
     Scanner.Cache = true;
     Scanner.SetConfiguration(0, ZBar.Config.Enable, 0);                      // disable all symbologies
     Scanner.SetConfiguration(ZBar.SymbolType.QRCODE, ZBar.Config.Enable, 1); // re-enable QR codes only

     // Same values as before, expressed as a single object initializer.
     _detectorParameters = new DetectorParameters
     {
         AdaptiveThreshConstant        = 7,
         AdaptiveThreshWinSizeMax      = 23,
         AdaptiveThreshWinSizeMin      = 3,
         AdaptiveThreshWinSizeStep     = 10,
         CornerRefinementMaxIterations = 30,
         CornerRefinementMinAccuracy   = 0.1,
         CornerRefinementWinSize       = 5,
         CornerRefinementMethod        = DetectorParameters.RefinementMethod.None,
         ErrorCorrectionRate           = 0.6,
         MarkerBorderBits              = 1,
         MaxErroneousBitsInBorderRate  = 0.35,
         MaxMarkerPerimeterRate        = 4.0,
         MinCornerDistanceRate         = 0.05,
         MinDistanceToBorder           = 3,
         MinMarkerDistanceRate         = 0.05,
         MinMarkerPerimeterRate        = 0.03,
         MinOtsuStdDev                 = 5.0,
         PerspectiveRemoveIgnoredMarginPerCell = 0.13,
         PerspectiveRemovePixelPerCell         = 8,
         PolygonalApproxAccuracyRate           = 0.1,
     };
 }
コード例 #6
0
        /// <summary>
        /// Initializes the form, loads default ArUco detector parameters and tries
        /// to open the default video capture device. If the device cannot be
        /// opened, the error is shown in a message box and the form still loads.
        /// </summary>
        public MainForm()
        {
            InitializeComponent();

            _detectorParameters = DetectorParameters.GetDefault();

            try
            {
                _capture = new VideoCapture();
                if (!_capture.IsOpened)
                {
                    _capture = null;
                    // InvalidOperationException instead of manually throwing
                    // NullReferenceException (reserved for the runtime).
                    throw new InvalidOperationException("Unable to open video capture");
                }
                else
                {
                    _capture.ImageGrabbed += ProcessFrame;
                }
            }
            catch (InvalidOperationException excpt)
            {
                MessageBox.Show(excpt.Message);
            }
            UpdateMessage(String.Empty);
        }
コード例 #7
0
ファイル: ArucoTest.cs プロジェクト: zjxbetter/opencvsharp
        /// <summary>
        /// Detects 6x6_250 markers in the sample image, draws accepted markers in
        /// red and rejected candidates in blue, and — only when a debugger is
        /// attached — saves and opens the annotated image.
        /// </summary>
        public void DrawDetectedMarker()
        {
            // If you want to save markers image, you must change the following values.
            const string path = "C:\\detected_markers_6x6_250.png";

            using (var source = Image("markers_6x6_250.png", ImreadModes.GrayScale))
            using (var canvas = source.CvtColor(ColorConversionCodes.GRAY2RGB))
            using (var dictionary = CvAruco.GetPredefinedDictionary(PredefinedDictionaryName.Dict6X6_250))
            using (var parameters = DetectorParameters.Create())
            {
                Point2f[][] acceptedCorners;
                Point2f[][] rejectedCorners;
                int[]       markerIds;
                CvAruco.DetectMarkers(source, dictionary, out acceptedCorners, out markerIds, parameters, out rejectedCorners);

                CvAruco.DrawDetectedMarkers(canvas, acceptedCorners, markerIds, new Scalar(255, 0, 0));
                CvAruco.DrawDetectedMarkers(canvas, rejectedCorners, null, new Scalar(0, 0, 255));

                if (Debugger.IsAttached)
                {
                    Cv2.ImWrite(path, canvas);
                    Process.Start(path);
                }
            }
        }
コード例 #8
0
ファイル: Aruco.cs プロジェクト: Awesome-CV/OpenCVB
        /// <summary>
        /// Detects four known ArUco markers (4x4_1000 dictionary) framing a region
        /// of interest in <paramref name="src"/>, then warps that region into a
        /// 1024x1024 normalized image. Results are written to the
        /// <c>detectedMarkers</c> and <c>normalizedImage</c> fields; returns early
        /// (leaving <c>normalizedImage</c> untouched) when any marker is missing.
        /// </summary>
        /// <param name="src">Input image containing the four corner markers.</param>
        public void Run(cv.Mat src)
        {
            // The locations of the markers in the image at FilePath.Image.Aruco.
            const int upperLeftMarkerId  = 160;
            const int upperRightMarkerId = 268;
            const int lowerRightMarkerId = 176;
            const int lowerLeftMarkerId  = 168;

            var detectorParameters = DetectorParameters.Create();

            // Sub-pixel corner refinement for a more accurate perspective transform.
            detectorParameters.CornerRefinementMethod  = CornerRefineMethod.Subpix;
            detectorParameters.CornerRefinementWinSize = 9;

            var dictionary = CvAruco.GetPredefinedDictionary(PredefinedDictionaryName.Dict4X4_1000);

            CvAruco.DetectMarkers(src, dictionary, out var corners, out var ids, detectorParameters, out var rejectedPoints);
            detectedMarkers = src.Clone();
            CvAruco.DrawDetectedMarkers(detectedMarkers, corners, ids, cv.Scalar.White);

            // Find the index of the four markers in the ids array. We'll use this same index into the
            // corners array to find the corners of each marker.
            var upperLeftCornerIndex  = Array.FindIndex(ids, id => id == upperLeftMarkerId);
            var upperRightCornerIndex = Array.FindIndex(ids, id => id == upperRightMarkerId);
            var lowerRightCornerIndex = Array.FindIndex(ids, id => id == lowerRightMarkerId);
            var lowerLeftCornerIndex  = Array.FindIndex(ids, id => id == lowerLeftMarkerId);

            // Make sure we found all four markers.
            if (upperLeftCornerIndex < 0 || upperRightCornerIndex < 0 || lowerRightCornerIndex < 0 || lowerLeftCornerIndex < 0)
            {
                return;
            }

            // Marker corners are stored clockwise beginning with the upper-left corner.
            // Get the first (upper-left) corner of the upper-left marker.
            var upperLeftPixel = corners[upperLeftCornerIndex][0];
            // Get the second (upper-right) corner of the upper-right marker.
            var upperRightPixel = corners[upperRightCornerIndex][1];
            // Get the third (lower-right) corner of the lower-right marker.
            var lowerRightPixel = corners[lowerRightCornerIndex][2];
            // Get the fourth (lower-left) corner of the lower-left marker.
            var lowerLeftPixel = corners[lowerLeftCornerIndex][3];

            // Create coordinates for passing to GetPerspectiveTransform
            var sourceCoordinates = new List <cv.Point2f>
            {
                upperLeftPixel, upperRightPixel, lowerRightPixel, lowerLeftPixel
            };
            // Destination is the full 1024x1024 output square, same clockwise order.
            var destinationCoordinates = new List <cv.Point2f>
            {
                new cv.Point2f(0, 0),
                new cv.Point2f(1024, 0),
                new cv.Point2f(1024, 1024),
                new cv.Point2f(0, 1024),
            };

            var transform = cv.Cv2.GetPerspectiveTransform(sourceCoordinates, destinationCoordinates);

            normalizedImage = new cv.Mat();
            cv.Cv2.WarpPerspective(src, normalizedImage, transform, new cv.Size(1024, 1024));
        }
コード例 #9
0
ファイル: DetectObjectCL.cs プロジェクト: red-pencil/ISWC18
    /// <summary>
    /// Runs the cascade classifier over <paramref name="image"/>, appends the
    /// detected rectangles to <paramref name="objects"/> and returns the elapsed
    /// detection time in milliseconds (also stored in <c>DetectionTime</c>).
    /// </summary>
    /// <param name="image">Input image. NOTE(review): the CvtColor call is commented
    /// out, so this appears to expect an already-grayscale image — confirm with callers.</param>
    /// <param name="objects">List that receives the detected rectangles.</param>
    /// <param name="param">Detection parameters; ScaleFactor/MinNeighbors are clamped to sane minimums.</param>
    public long DetectObjects(Mat image, List <Rectangle> objects, DetectorParameters param)
    {
        Stopwatch watch;

        watch = Stopwatch.StartNew();

        // Clamp to OpenCV-safe minimums (scale factor must be > 1.0).
        param.ScaleFactor  = Mathf.Max((float)param.ScaleFactor, 1.05f);
        param.MinNeighbors = Mathf.Max(param.MinNeighbors, 3);

        // UMat's constructor takes (rows, cols): rows correspond to the image
        // height and cols to the width. The original code passed them swapped.
        using (UMat ugray = new UMat(image.Height, image.Width, Emgu.CV.CvEnum.DepthType.Cv8U, 1))
        {
            //CvInvoke.CvtColor (image, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);

            //normalizes brightness and increases contrast of the image
            CvInvoke.EqualizeHist(image, ugray);

            //Detect the faces  from the gray scale image and store the locations as rectangle
            //The first dimensional is the channel
            //The second dimension is the index of the rectangle in the specific channel
            Rectangle[] detected = _classifier.DetectMultiScale(
                ugray,
                param.ScaleFactor,
                param.MinNeighbors,
                new Size((int)param.MinSize.x, (int)param.MinSize.y),
                new Size((int)param.MaxSize.x, (int)param.MaxSize.y));

            objects.AddRange(detected);             /**/
        }
        watch.Stop();

        DetectionTime = watch.ElapsedMilliseconds;
        return(watch.ElapsedMilliseconds);
    }
コード例 #10
0
 /// <summary>
 /// Initializes the 4x4_50 marker dictionary, the grid board and default
 /// detection parameters, then opens camera 0 and hooks the grab handler.
 /// </summary>
 void Start()
 {
     dico       = new Dictionary(Dictionary.PredefinedDictionaryName.Dict4X4_50);
     grid       = new GridBoard(4, 4, 80, 30, dico);
     arucoParam = DetectorParameters.GetDefault();

     capture = new VideoCapture(0);
     capture.ImageGrabbed += HandleGrab;
 }
コード例 #11
0
ファイル: Aruco.cs プロジェクト: holyris/CodeMiner
        /// <summary>
        /// Convenience overload of <c>RefineDetectedMarkers</c> that runs the
        /// refinement with a default-constructed set of detector parameters.
        /// </summary>
        public static void RefineDetectedMarkers(Cv.Mat image, Board board, Std.VectorVectorPoint2f detectedCorners, Std.VectorInt detectedIds,
                                                 Std.VectorVectorPoint2f rejectedCorners, Cv.Mat cameraMatrix, Cv.Mat distCoeffs, float minRepDistance, float errorCorrectionRate,
                                                 bool checkAllOrders, Std.VectorInt recoveredIdxs)
        {
            // Delegate to the full overload, supplying default parameters inline.
            RefineDetectedMarkers(image, board, detectedCorners, detectedIds, rejectedCorners, cameraMatrix, distCoeffs, minRepDistance,
                                  errorCorrectionRate, checkAllOrders, recoveredIdxs, new DetectorParameters());
        }
コード例 #12
0
ファイル: ArucoTest.cs プロジェクト: yyp2003net/opencvsharp
 /// <summary>
 /// Smoke test: marker detection over the sample 6x6_250 image completes
 /// without throwing; all outputs are discarded.
 /// </summary>
 public void DetectMarkers()
 {
     using (var image = Image("markers_6x6_250.png", ImreadModes.Grayscale))
     using (var dictionary = CvAruco.GetPredefinedDictionary(PredefinedDictionaryName.Dict6X6_250))
     using (var parameters = DetectorParameters.Create())
     {
         CvAruco.DetectMarkers(image, dictionary, out _, out _, parameters, out _);
     }
 }
コード例 #13
0
ファイル: ArucoTest.cs プロジェクト: zjxbetter/opencvsharp
        /// <summary>
        /// Round-trip test: every writable DetectorParameters property accepts a
        /// value and reads the same value back.
        /// </summary>
        public void DetectorParametersProperties()
        {
            var param = DetectorParameters.Create();

            // One sentinel value per property type.
            const bool   boolValue   = true;
            const int    intValue    = 100;
            const double doubleValue = 10d;

            param.DoCornerRefinement = boolValue;

            // Set all double-typed properties.
            param.AdaptiveThreshConstant       = doubleValue;
            param.CornerRefinementMinAccuracy  = doubleValue;
            param.ErrorCorrectionRate          = doubleValue;
            param.MaxErroneousBitsInBorderRate = doubleValue;
            param.MaxMarkerPerimeterRate       = doubleValue;
            param.MinCornerDistanceRate        = doubleValue;
            param.MinMarkerDistanceRate        = doubleValue;
            param.MinMarkerPerimeterRate       = doubleValue;
            param.MinOtsuStdDev = doubleValue;
            param.PerspectiveRemoveIgnoredMarginPerCell = doubleValue;
            param.PolygonalApproxAccuracyRate           = doubleValue;

            // Set all int-typed properties.
            param.AdaptiveThreshWinSizeMax      = intValue;
            param.AdaptiveThreshWinSizeStep     = intValue;
            param.CornerRefinementMaxIterations = intValue;
            param.CornerRefinementWinSize       = intValue;
            param.MarkerBorderBits              = intValue;
            param.MinDistanceToBorder           = intValue;
            param.PerspectiveRemovePixelPerCell = intValue;
            param.AdaptiveThreshWinSizeMin      = intValue;

            // Read every property back and verify it kept the assigned value.
            Assert.AreEqual(boolValue, param.DoCornerRefinement);

            Assert.AreEqual(doubleValue, param.AdaptiveThreshConstant);
            Assert.AreEqual(doubleValue, param.CornerRefinementMinAccuracy);
            Assert.AreEqual(doubleValue, param.ErrorCorrectionRate);
            Assert.AreEqual(doubleValue, param.MaxErroneousBitsInBorderRate);
            Assert.AreEqual(doubleValue, param.MaxMarkerPerimeterRate);
            Assert.AreEqual(doubleValue, param.MinCornerDistanceRate);
            Assert.AreEqual(doubleValue, param.MinMarkerDistanceRate);
            Assert.AreEqual(doubleValue, param.MinMarkerPerimeterRate);
            Assert.AreEqual(doubleValue, param.MinOtsuStdDev);
            Assert.AreEqual(doubleValue, param.PerspectiveRemoveIgnoredMarginPerCell);
            Assert.AreEqual(doubleValue, param.PolygonalApproxAccuracyRate);

            Assert.AreEqual(intValue, param.AdaptiveThreshWinSizeMax);
            Assert.AreEqual(intValue, param.AdaptiveThreshWinSizeStep);
            Assert.AreEqual(intValue, param.CornerRefinementMaxIterations);
            Assert.AreEqual(intValue, param.CornerRefinementWinSize);
            Assert.AreEqual(intValue, param.MarkerBorderBits);
            Assert.AreEqual(intValue, param.MinDistanceToBorder);
            Assert.AreEqual(intValue, param.PerspectiveRemovePixelPerCell);
            Assert.AreEqual(intValue, param.AdaptiveThreshWinSizeMin);

            // Release the native object explicitly.
            param.Dispose();
        }
コード例 #14
0
    /// <summary>
    /// Initializes the 4x4_50 marker dictionary, the grid board and default
    /// detection parameters, then allocates the corner/position buffers.
    /// </summary>
    private void Awake()
    {
        markerDict      = new Dictionary(Dictionary.PredefinedDictionaryName.Dict4X4_50);
        gridBoard       = new GridBoard(4, 4, 80, 30, markerDict);
        arucoParameters = DetectorParameters.GetDefault();

        boardCorners        = new PointF[4];
        markerPositions     = new PointF[4];
        markerWrapPositions = new PointF[4];
    }
コード例 #15
0
 /// <summary>
 /// Opens the default Kinect sensor, subscribes to color-frame-arrived events
 /// and loads the default ArUco detector parameters.
 /// </summary>
 public void InitKinect()
 {
     _sensor = KinectSensor.GetDefault();
     _rgbReader = _sensor.ColorFrameSource.OpenReader();
     _rgbReader.FrameArrived += rgbReader_FrameArrived;
     _sensor.Open();
     _detectorParameters = DetectorParameters.GetDefault();
 }
コード例 #16
0
    /// <summary>
    /// Detects ArUco markers in <paramref name="rgbMat"/>. When at least one
    /// marker is found, returns the first marker's centre converted from OpenCV
    /// to Unity camera coordinates (w == 1) and sets <c>success</c> to true;
    /// otherwise returns a zero vector (w == 0) and sets <c>success</c> to false.
    /// </summary>
    /// <param name="rgbMat">Input RGB image to search for markers.</param>
    public Vector4 DetectMarkers(Mat rgbMat)
    {
        Mat        ids             = new Mat();
        List <Mat> corners         = new List <Mat>();
        List <Mat> rejectedCorners = new List <Mat>();
        Mat        rvecs           = new Mat();
        Mat        tvecs           = new Mat();
        DetectorParameters detectorParams = DetectorParameters.create();
        Dictionary         dictionary     = Aruco.getPredefinedDictionary((int)dictionaryId);

        // Default result: zero vector with w == 0 signals "nothing detected".
        // (The original initialized a Vector4 from new Vector3(), which yields
        // the same (0,0,0,0) via implicit conversion — this is just explicit.)
        Vector4 ARM = Vector4.zero;

        try
        {
            // detect markers
            Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

            // if at least one marker detected
            if (ids.total() > 0)
            {
                // estimate pose, from marker to camera
                Aruco.estimatePoseSingleMarkers(corners, markerSize, camMatrix, distCoeffs, rvecs, tvecs);

                // Marker centre location
                double[] tvecArr = tvecs.get(0, 0);

                // image flip + coordinates transformation (OpenCV to Unity)
                ARM = new Vector4((float)tvecArr[0] + 0.005f, (float)tvecArr[1] + 0.005f, -(float)tvecArr[2], 1f);

                Debug.Log("raw ARM " + ARM.ToString("f5"));

                success = true;
            }
            else
            {
                Debug.Log("not detected");
                success = false;
            }
        }
        finally
        {
            // Mats wrap native OpenCV memory; release everything allocated above
            // instead of leaking it on every call.
            ids.Dispose();
            rvecs.Dispose();
            tvecs.Dispose();
            foreach (Mat corner in corners)
            {
                corner.Dispose();
            }
            foreach (Mat rejected in rejectedCorners)
            {
                rejected.Dispose();
            }
        }

        return ARM;
    }
コード例 #17
0
    //private detectObject Detect

    /// <summary>
    /// Opens the camera, allocates the frame buffers and initializes the ArUco
    /// dictionary and default detection parameters.
    /// </summary>
    void Start()
    {
        cam = new VideoCapture(source);
        cam.Set(CaptureProperty.ConvertRgb, 0); // disable automatic RGB conversion — flagged as important by the author
        cam_frame       = new Mat();
        leftRight_frame = new Mat[2];

        setCamParameters(); // configure the camera parameters

        ar_dict      = CvAruco.GetPredefinedDictionary(dictName);
        detect_param = DetectorParameters.Create();
    }
コード例 #18
0
ファイル: ArucoUnity.cs プロジェクト: brehm/aruco-unity
            /// <summary>
            /// P/Invoke wrapper: detects markers in <paramref name="image"/> using
            /// the native au_detectMarkers2 entry point and wraps the returned
            /// native vectors of corners and ids.
            /// </summary>
            /// <param name="image">Input image.</param>
            /// <param name="dictionary">Marker dictionary to detect against.</param>
            /// <param name="corners">Receives the detected marker corners.</param>
            /// <param name="ids">Receives the detected marker ids.</param>
            /// <param name="parameters">Detection parameters passed to the native call.</param>
            public static void DetectMarkers(Cv.Core.Mat image, Dictionary dictionary, out Std.VectorVectorPoint2f corners, out Std.VectorInt ids,
                                             DetectorParameters parameters)
            {
                Cv.Core.Exception exception = new Cv.Core.Exception();
                System.IntPtr     cornersPtr, idsPtr;

                au_detectMarkers2(image.cppPtr, dictionary.cppPtr, out cornersPtr, out idsPtr, parameters.cppPtr, exception.cppPtr);
                corners = new Std.VectorVectorPoint2f(cornersPtr);
                ids     = new Std.VectorInt(idsPtr);

                // NOTE(review): the native exception is checked only after the output
                // pointers are wrapped — confirm the pointers are valid (or unused)
                // when the native call fails.
                exception.Check();
            }
コード例 #19
0
ファイル: ArucoTest.cs プロジェクト: zhoufan1987/opencvsharp
        /// <summary>
        /// Verifies that <c>DetectorParameters.Create()</c> returns the expected
        /// default values for a subset of the detection parameters.
        /// </summary>
        public void CreateDetectorParameters()
        {
            var param = DetectorParameters.Create();

            // Adaptive thresholding defaults.
            Assert.Equal(3, param.AdaptiveThreshWinSizeMin);
            Assert.Equal(23, param.AdaptiveThreshWinSizeMax);
            Assert.Equal(10, param.AdaptiveThreshWinSizeStep);
            Assert.Equal(7, param.AdaptiveThreshConstant);
            // Contour filtering defaults (floating-point values compared to 3 decimal places).
            Assert.Equal(0.03, param.MinMarkerPerimeterRate, 3);
            Assert.Equal(4, param.MaxMarkerPerimeterRate, 3);
            Assert.Equal(0.05, param.MinCornerDistanceRate, 3);
        }
コード例 #20
0
ファイル: ArucoTest.cs プロジェクト: zjxbetter/opencvsharp
 /// <summary>
 /// Smoke test: marker detection over the sample 6x6_250 image completes
 /// without throwing; the detection outputs are not inspected.
 /// </summary>
 public void DetectMarkers()
 {
     using (var image = Image("markers_6x6_250.png", ImreadModes.GrayScale))
     using (var dictionary = CvAruco.GetPredefinedDictionary(PredefinedDictionaryName.Dict6X6_250))
     using (var parameters = DetectorParameters.Create())
     {
         Point2f[][] detectedCorners;
         int[]       detectedIds;
         Point2f[][] rejectedCandidates;
         CvAruco.DetectMarkers(image, dictionary, out detectedCorners, out detectedIds, parameters, out rejectedCandidates);
     }
 }
コード例 #21
0
ファイル: Aruco.cs プロジェクト: holyris/CodeMiner
        /// <summary>
        /// P/Invoke wrapper: detects markers in <paramref name="image"/> using the
        /// native au_detectMarkers entry point and wraps the returned native
        /// vectors of corners, ids and rejected candidates.
        /// </summary>
        /// <param name="image">Input image.</param>
        /// <param name="dictionary">Marker dictionary to detect against.</param>
        /// <param name="corners">Receives the detected marker corners.</param>
        /// <param name="ids">Receives the detected marker ids.</param>
        /// <param name="parameters">Detection parameters passed to the native call.</param>
        /// <param name="rejectedImgPoints">Receives the rejected marker candidates.</param>
        public static void DetectMarkers(Cv.Mat image, Dictionary dictionary, out Std.VectorVectorPoint2f corners, out Std.VectorInt ids,
                                         DetectorParameters parameters, out Std.VectorVectorPoint2f rejectedImgPoints)
        {
            Cv.Exception exception = new Cv.Exception();
            IntPtr       cornersPtr, idsPtr, rejectedPtr;

            au_detectMarkers(image.CppPtr, dictionary.CppPtr, out cornersPtr, out idsPtr, parameters.CppPtr, out rejectedPtr, exception.CppPtr);
            corners           = new Std.VectorVectorPoint2f(cornersPtr);
            ids               = new Std.VectorInt(idsPtr);
            rejectedImgPoints = new Std.VectorVectorPoint2f(rejectedPtr);

            // NOTE(review): the native exception is checked only after the output
            // pointers are wrapped — confirm the pointers are valid (or unused)
            // when the native call fails.
            exception.Check();
        }
コード例 #22
0
ファイル: beeagv_aruco.cs プロジェクト: yytx0802/CheatingCode
 /// <summary>
 /// Initializes default ArUco detector parameters, opens the capture device
 /// identified by <c>camId</c>, and — when pose estimation is enabled — loads
 /// the camera matrix and distortion coefficients from <c>_filename</c>.
 /// </summary>
 public ArucoModule()
 {
     _detectorParameters = DetectorParameters.GetDefault();
     _capture            = new VideoCapture(camId);
     if (estimatePose)
     {
         bool readOk = readCameraParameters(_filename, ref _cameraMatrix, ref _distCoeffs);
         if (!readOk)
         {
             // NOTE(review): on failure this only logs and returns, leaving
             // _cameraMatrix/_distCoeffs unset while estimatePose stays enabled —
             // confirm downstream code tolerates this half-initialized state.
             Console.WriteLine("cannot find parameter files");
             return;
         }
     }
 }
コード例 #23
0
    // Use this for initialization
    // Use this for initialization
    /// <summary>
    /// Opens the camera, initializes the ArUco dictionary and default detection
    /// parameters, and binds a camera-sized RGB24 texture to this renderer.
    /// </summary>
    void Start()
    {
        cam = new VideoCapture(source);
        cam.Set(CaptureProperty.ConvertRgb, 3);
        cam_frame = new Mat();

        ar_dict      = CvAruco.GetPredefinedDictionary(dictName);
        detect_param = DetectorParameters.Create();

        cam_Texture = new Texture2D(
            cam.FrameWidth,      // camera width
            cam.FrameHeight,     // camera height
            TextureFormat.RGB24, // pixel format
            false                // mipmaps disabled
            );
        this.GetComponent <Renderer>().material.mainTexture = cam_Texture;
    }
コード例 #24
0
    /// <summary>
    /// Prepares marker detection: raises the target frame rate, builds the
    /// OpenCV detector parameters and marker dictionary, resets the detection
    /// pause timer and starts the asynchronous marker-detection loop.
    /// </summary>
    protected virtual void Init()
    {
        //For mobile devices which by default have their target frame set to 30
        Application.targetFrameRate = 60;

        //create OpenCV detector parameters
        detectorParameters = DetectorParameters.Create();
        detectorParameters.DoCornerRefinement = doCornerRefinement;
        //Apply different detection parameters
        AssignDetectorParameterStructValues();

        dictionary = CvAruco.GetPredefinedDictionary(markerDictionaryType);

        timeCount = markerDetectorPauseTime;

        //start the marker thread
        DetectMarkerAsync();
    }
コード例 #25
0
        /// <summary>
        /// Detects markers in the sample 6x6_250 image, estimates a pose for each
        /// and checks the sizes of the resulting rotation/translation vectors and
        /// object points.
        /// </summary>
        public void EstimatePoseSingleMarkers()
        {
            using var image = Image("markers_6x6_250.png", ImreadModes.Grayscale);
            using var dict  = CvAruco.GetPredefinedDictionary(PredefinedDictionaryName.Dict6X6_250);
            // DetectorParameters wraps a native object; dispose it like every
            // other resource in this test (the original leaked it).
            using var param = DetectorParameters.Create();

            CvAruco.DetectMarkers(image, dict, out var corners, out _, param, out _);

            using var cameraMatrix = Mat.Eye(3, 3, MatType.CV_64FC1);
            using var distCoeffs   = Mat.Zeros(4, 1, MatType.CV_64FC1);
            using var rvec         = new Mat();
            using var tvec         = new Mat();
            using var objPoints    = new Mat();
            CvAruco.EstimatePoseSingleMarkers(corners, 6, cameraMatrix, distCoeffs, rvec, tvec, objPoints);

            Assert.Equal(20, rvec.Total());
            Assert.Equal(20, tvec.Total());
            Assert.Equal(4, objPoints.Total());
        }
コード例 #26
0
        /// <summary>
        /// Detects ChArUco board markers in <paramref name="image"/>, refines them
        /// against the board layout and, when any markers are found, interpolates
        /// the chessboard-corner positions.
        /// </summary>
        /// <param name="image">Input image.</param>
        /// <param name="squaresX">Number of board squares in X.</param>
        /// <param name="squaresY">Number of board squares in Y.</param>
        /// <param name="squareLength">Side length of a chessboard square.</param>
        /// <param name="markerLength">Side length of a marker.</param>
        /// <param name="dictionary">Predefined dictionary the board's markers come from.</param>
        /// <returns>Marker ids/corners plus interpolated ChArUco ids/corners (empty when no markers were found).</returns>
        public static (VectorOfInt markerIds, VectorOfVectorOfPointF markerCorners, VectorOfInt charucoIds, VectorOfPointF charucoCorners) Detect(Mat image, int squaresX, int squaresY, float squareLength, float markerLength, PredefinedDictionaryName dictionary)
        {
            VectorOfInt            markerIds             = new VectorOfInt();
            VectorOfVectorOfPointF markerCorners         = new VectorOfVectorOfPointF();
            VectorOfInt            charucoIds            = new VectorOfInt();
            VectorOfPointF         charucoCorners        = new VectorOfPointF();
            VectorOfVectorOfPointF rejectedMarkerCorners = new VectorOfVectorOfPointF();

            DetectorParameters decParameters = DetectorParameters.GetDefault();

            // Build the dictionary and the board once instead of re-creating them
            // for every call below (the original constructed three identical boards).
            Dictionary markerDictionary = new Dictionary(dictionary);
            var board = CreateBoard(squaresX, squaresY, squareLength, markerLength, markerDictionary);

            ArucoInvoke.DetectMarkers(image, markerDictionary, markerCorners, markerIds, decParameters, rejectedMarkerCorners);

            ArucoInvoke.RefineDetectedMarkers(image, board, markerCorners, markerIds, rejectedMarkerCorners, null, null, 10, 3, true, null, decParameters);

            if (markerIds.Size > 0)
            {
                ArucoInvoke.InterpolateCornersCharuco(markerCorners, markerIds, image, board, charucoCorners, charucoIds, null, null, 2);
            }

            return(markerIds, markerCorners, charucoIds, charucoCorners);
        }
コード例 #27
0
        /// <summary>
        /// Get the NFT data for the NFT
        /// </summary>
        /// <param name="address">Address of the sender</param>
        /// <param name="key">Private key of the sender for encryption</param>
        /// <param name="receiver">receiver of the NFT</param>
        /// <returns></returns>
        /// <summary>
        /// Get the NFT data for the NFT
        /// </summary>
        /// <param name="address">Address of the sender</param>
        /// <param name="key">Private key of the sender for encryption</param>
        /// <param name="receiver">receiver of the NFT</param>
        /// <returns>Metadata dictionary for this token.</returns>
        public override async Task <IDictionary <string, string> > GetMetadata(string address = "", string key = "", string receiver = "")
        {
            // Start from the metadata common to all token types.
            var metadata = await GetCommonMetadata();

            // Adds a name/value pair only when the value is non-empty.
            void AddIfPresent(string name, string value)
            {
                if (!string.IsNullOrEmpty(value))
                {
                    metadata.Add(name, value);
                }
            }

            AddIfPresent("DeviceName", DeviceName);
            AddIfPresent("DetectorName", DetectorName);
            AddIfPresent("SourceName", SourceName);
            AddIfPresent("PositionerName", PositionerName);

            // Serialize each parameter set only when it differs from its defaults.
            if (!SourceParameters.IsDefault())
            {
                metadata.Add("SrcPos", JsonConvert.SerializeObject(SourceParameters));
            }
            if (!DetectorParameters.IsDefault())
            {
                metadata.Add("DetPar", JsonConvert.SerializeObject(DetectorParameters));
            }
            if (!PositionerParameters.IsDefault())
            {
                metadata.Add("PosPar", JsonConvert.SerializeObject(PositionerParameters));
            }

            return metadata;
        }
コード例 #28
0
ファイル: Vision.cs プロジェクト: rexcardan/KinectX
        /// <summary>
        /// Finds ARUCO markers in an image and
        /// </summary>
        /// <param name="colorCv"></param>
        /// <returns></returns>
        /// <summary>
        /// Finds ARUCO markers (6x6_250 dictionary) in a Kinect color image and
        /// returns them ordered by descending contour area. Corner X coordinates
        /// are un-mirrored back into the original (flipped) Kinect color space.
        /// </summary>
        /// <param name="colorCv">Kinect color frame (BGR or BGRA).</param>
        /// <returns>Detected markers with id, corner points and area.</returns>
        public static List <Marker> FindAruco(Mat colorCv)
        {
            Mat m = new Mat();

            colorCv.CopyTo(m);
            if (colorCv.Channels() == 4)
            {
                // CvtColor returns a new Mat; dispose the BGRA copy we no longer
                // need (the original reassignment leaked it).
                Mat bgr = m.CvtColor(ColorConversionCodes.BGRA2BGR);
                m.Dispose();
                m = bgr;
            }
            //Kinect color image is flipped, correct before ARUCO detection
            var           flipped    = m.Flip(FlipMode.Y);
            List <Marker> markers    = new List <Marker>();
            var           dictionary = CvAruco.GetPredefinedDictionary(PredefinedDictionaryName.Dict6X6_250);

            Point2f[][] points;
            Point2f[][] rejPoints;
            int[]       ids;
            var         parms = DetectorParameters.Create();

            CvAruco.DetectMarkers(flipped, dictionary, out points, out ids, parms, out rejPoints);

            // The native images are no longer needed once detection has produced
            // managed corner arrays; release them instead of leaking.
            flipped.Dispose();
            m.Dispose();

            for (int i = 0; i < ids.Length; i++)
            {
                var id = ids[i];
                var arucoMarkerCorners = points[i];

                // Mirror the X coordinates back into the un-flipped color space.
                for (int j = 0; j < arucoMarkerCorners.Length; j++)
                {
                    arucoMarkerCorners[j].X = KinectSettings.COLOR_WIDTH - arucoMarkerCorners[j].X - 1;
                }
                var area = Cv2.ContourArea(arucoMarkerCorners);
                markers.Add(new Marker()
                {
                    Id = id, Points = arucoMarkerCorners, Area = area
                });
            }
            return(markers.OrderByDescending(ma => ma.Area).ToList());
        }
コード例 #29
0
        /// <summary>
        /// Builds the metadata dictionary for this NFT: common token metadata plus
        /// X-ray specific entries that are only emitted when they differ from defaults.
        /// </summary>
        /// <param name="address">Address of the sender</param>
        /// <param name="key">Private key of the sender for encryption</param>
        /// <param name="receiver">receiver of the NFT</param>
        /// <returns>Metadata key/value pairs for the NFT.</returns>
        public override async Task <IDictionary <string, string> > GetMetadata(string address = "", string key = "", string receiver = "")
        {
            // Start from the shared token metadata, then layer on X-ray fields.
            var metadata = await GetCommonMetadata();

            // Adds an entry only when the condition holds; the value factory is
            // lazy so serialization only runs for entries that are emitted.
            void AddWhen(bool condition, string name, Func <string> value)
            {
                if (condition)
                {
                    metadata.Add(name, value());
                }
            }

            AddWhen(!XrayParams.IsDefault(), "ExpPar", () => JsonConvert.SerializeObject(XrayParams));
            AddWhen(!ObjectPosition.IsDefault(), "ObjPos", () => JsonConvert.SerializeObject(ObjectPosition));
            AddWhen(!DetectorParameters.IsDefault(), "DetPar", () => JsonConvert.SerializeObject(DetectorParameters));

            // Boolean flags are stored only when set, as the literal string "true".
            AddWhen(IsOriginal, "IsOrig", () => "true");
            AddWhen(IsRaw, "IsRaw", () => "true");
            AddWhen(IsAveraged, "IsAvg", () => "true");

            AddWhen(!string.IsNullOrEmpty(XrayDeviceNFTHash), "XrayDeviceNFT", () => XrayDeviceNFTHash);

            return(metadata);
        }
        /// <summary>
        /// One-time setup for ChArUco camera calibration: allocates the preview
        /// texture, builds an initial camera matrix (zero distortion), creates the
        /// ARUCO detector/dictionary and ChArUco board, and resets the buffers that
        /// accumulate calibration frames. Sets <c>isInitialized</c> last.
        /// </summary>
        /// <param name="frameMat">A camera frame used only for its width/height.</param>
        private void InitializeCalibraton(Mat frameMat)
        {
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = frameMat.width();
            float height = frameMat.height();

            // Preview texture matching the frame size; Clamp avoids edge bleeding.
            texture          = new Texture2D(frameMat.cols(), frameMat.rows(), TextureFormat.RGBA32, false);
            texture.wrapMode = TextureWrapMode.Clamp;

            previewQuad.GetComponent <MeshRenderer>().material.mainTexture = texture;
            // Keep the quad's aspect ratio equal to the frame's aspect ratio.
            previewQuad.transform.localScale = new Vector3(0.2f * width / height, 0.2f, 1);

            float imageSizeScale = 1.0f;

            // set cameraparam.
            camMatrix = CreateCameraMatrix(width, height);
            Debug.Log("camMatrix " + camMatrix.dump());

            // Assume an undistorted lens: all five distortion coefficients zero.
            distCoeffs = new MatOfDouble(0, 0, 0, 0, 0);
            Debug.Log("distCoeffs " + distCoeffs.dump());

            // calibration camera.
            // Derive FOV/focal-length/principal-point from the initial camera
            // matrix, purely for diagnostic logging below.
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx[0]);
            Debug.Log("fovy " + fovy[0]);
            Debug.Log("focalLength " + focalLength[0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio[0]);


            // Working mats and per-frame detection outputs.
            bgrMat          = new Mat(frameMat.rows(), frameMat.cols(), CvType.CV_8UC3);
            rgbaMat         = new Mat(frameMat.rows(), frameMat.cols(), CvType.CV_8UC4);
            ids             = new Mat();
            corners         = new List <Mat>();
            rejectedCorners = new List <Mat>();
            rvecs           = new List <Mat>();
            tvecs           = new List <Mat>();

            detectorParams = DetectorParameters.create();
            detectorParams.set_cornerRefinementMethod(1);// do cornerSubPix() of OpenCV.
            dictionary = Aruco.getPredefinedDictionary((int)dictionaryId);

            recoveredIdxs = new Mat();

            // ChArUco board used as the calibration target.
            // NOTE(review): "Borad" in the field names below is a pre-existing
            // typo in fields declared elsewhere in this class.
            charucoCorners = new Mat();
            charucoIds     = new Mat();
            charucoBoard   = CharucoBoard.create((int)squaresX, (int)squaresY, chArUcoBoradSquareLength, chArUcoBoradMarkerLength, dictionary);


            // Accumulators for corners/ids/images collected across capture frames.
            allCorners = new List <List <Mat> >();
            allIds     = new List <Mat>();
            allImgs    = new List <Mat>();

            imagePoints = new List <Mat>();

            // Flag set last so other callbacks see fully-constructed state.
            isInitialized = true;
        }