Example #1
        /// <summary>
        /// Initializes the detector for single marker detection.
        /// </summary>
        /// <param name="width">The width of the buffer that will be used for detection.</param>
        /// <param name="height">The height of the buffer that will be used for detection.</param>
        /// <param name="nearPlane">The near view plane of the frustum.</param>
        /// <param name="farPlane">The far view plane of the frustum.</param>
        /// <param name="markers">A list of markers that should be detected.</param>
        /// <param name="bufferType">The type of the buffer.</param>
        /// <param name="adaptive">Performs an adaptive bitmap thresholding if set to true. Default = false.</param>
        protected void Initialize(int width, int height, double nearPlane, double farPlane, IList<Marker> markers, int bufferType, bool adaptive = false)
        {
            // Check arguments
            if (markers == null)
            {
                throw new ArgumentNullException("markers");
            }
            if (!markers.Any())
            {
                throw new ArgumentException("At least one marker must be provided.", "markers");
            }

            // Member init
            this.bufferWidth  = width;
            this.bufferHeight = height;
            this.isAdaptive   = adaptive;

            // Init pattern matchers with markers and check segment size, which has to be equal for all markers
            int segmentX        = markers[0].SegmentsX;
            int segmentY        = markers[0].SegmentsY;
            var patternMatchers = new List<PatternMatcher>(markers.Count);

            foreach (var marker in markers)
            {
                if (marker.SegmentsX != segmentX || marker.SegmentsY != segmentY)
                {
                    throw new ArgumentException("The Segment size has to be equal for all markers. Don't mix 16x16 and 32x32 markers for example.", "markers");
                }
                patternMatchers.Add(new PatternMatcher(marker));
            }

            // Load default camera calibration data
            var asmName = new System.Reflection.AssemblyName(System.Reflection.Assembly.GetExecutingAssembly().FullName).Name;

            var streamResInfoCam = Application.GetResourceStream(new Uri(asmName + ";component/data/Camera_Calibration_Default.dat", UriKind.Relative));
            if (streamResInfoCam == null)
            {
                throw new System.IO.FileNotFoundException("Could not load the default camera calibration resource.");
            }

            var cameraParameters = new NyARParam();

            using (var cameraCalibrationDataStream = streamResInfoCam.Stream)
            {
                cameraParameters.loadARParam(cameraCalibrationDataStream);
                cameraParameters.changeScreenSize(width, height);
            }

            // Get projection matrix from camera calibration data
            this.Projection = cameraParameters.GetCameraFrustumRH(nearPlane, farPlane);

            // Init detector and necessary data
            var colorPattern = new NyARColorPatt_Perspective_O2(segmentX, segmentY, 4, 25);
            var patternMatchDeviationData = new NyARMatchPattDeviationColorData(segmentX, segmentY);

            this.squareDetector          = new NyARSquareContourDetector_Rle(cameraParameters.getScreenSize());
            this.squareDetectionListener = new SquareDetectionListener(patternMatchers, cameraParameters, colorPattern, patternMatchDeviationData);

            // Init buffer members
            this.filteredBuffer = new NyARBinRaster(width, height);
            if (adaptive)
            {
                this.bufferFilter = new NyARRasterFilter_AdaptiveThreshold(bufferType);
            }
            else
            {
                this.bufferFilter = new NyARRasterFilter_ARToolkitThreshold(this.Threshold, bufferType);
            }
        }
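
A detector subclass would typically call this Initialize overload once, before any frames are processed, passing the buffer size, the frustum planes, the marker list, and a NyARToolkit buffer type. The sketch below is illustrative only: the class names, the 640x480 buffer size, the 1/4000 frustum planes, and the NyARBufferType constant are assumptions, not part of the example above.

        // Illustrative call site; the names and values here are assumptions.
        public class SampleMarkerDetector : MarkerDetectorBase // hypothetical base class that exposes Initialize
        {
            public void Setup(IList<Marker> markers)
            {
                // 640x480 detection buffer, frustum from 1 to 4000 units,
                // fixed-threshold binarization (adaptive defaults to false).
                this.Initialize(640, 480, 1, 4000, markers, NyARBufferType.INT1D_X8R8G8B8_32);
            }
        }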
Example #2
        /// <summary>
        /// Initializes the detector for single marker detection.
        /// </summary>
        /// <param name="width">The width of the buffer that will be used for detection.</param>
        /// <param name="height">The height of the buffer that will be used for detection.</param>
        /// <param name="nearPlane">The near view plane of the frustum.</param>
        /// <param name="farPlane">The far view plane of the frustum.</param>
        /// <param name="markers">A list of markers that should be detected.</param>
        /// <param name="bufferType">The type of the buffer.</param>
        /// <param name="adaptive">Performs an adaptive bitmap thresholding if set to true. Default = false.</param>
        protected void Initialize(int width, int height, double nearPlane, double farPlane, IList<Marker> markers, int bufferType, bool adaptive = false)
        {
            // Check arguments
            if (markers == null)
            {
                throw new ArgumentNullException("markers");
            }
            if (!markers.Any())
            {
                throw new ArgumentException("At least one marker must be provided.", "markers");
            }

            // Member init
            this.bufferWidth = width;
            this.bufferHeight = height;
            this.isAdaptive = adaptive;

            // Init pattern matchers with markers and check segment size, which has to be equal for all markers
            int segmentX = markers[0].SegmentsX;
            int segmentY = markers[0].SegmentsY;
            var patternMatchers = new List<PatternMatcher>(markers.Count);
            foreach (var marker in markers)
            {
                if (marker.SegmentsX != segmentX || marker.SegmentsY != segmentY)
                {
                    throw new ArgumentException("The Segment size has to be equal for all markers. Don't mix 16x16 and 32x32 markers for example.", "markers");
                }
                patternMatchers.Add(new PatternMatcher(marker));
            }

            // Load camera calibration data from a file deployed with the application
            string location = System.IO.Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location);
            string calibrationPath = System.IO.Path.Combine(location, "Content", "Data", "Camera_Calibration_1280x720.dat");
            var cameraParameters = new NyARParam();
            using (var cameraCalibrationDataStream = System.IO.File.OpenRead(calibrationPath))
            {
                cameraParameters.loadARParam(cameraCalibrationDataStream);
                cameraParameters.changeScreenSize(width, height);
            }

            //var asmName = new System.Reflection.AssemblyName(System.Reflection.Assembly.GetExecutingAssembly().FullName).Name;

            //var uri = new Uri(asmName + ";component/Assets/data/Camera_Calibration_1280x720.dat", UriKind.Relative);
            //var streamResInfoCam = Application.GetResourceStream(uri);
            //if (null == streamResInfoCam)
            //    throw new FileNotFoundException("Application.GetResourceStream returned null", uri.OriginalString);

            //var cameraParameters = new NyARParam();
            //using (var cameraCalibrationDataStream = streamResInfoCam.Stream)
            //{
            //    cameraParameters.loadARParam(cameraCalibrationDataStream);
            //    cameraParameters.changeScreenSize(width, height);
            //}

            // Get projection matrix from camera calibration data
            this.Projection = cameraParameters.GetCameraFrustumRH(nearPlane, farPlane);

            // Init detector and necessary data
            var colorPattern = new NyARColorPatt_Perspective_O2(segmentX, segmentY, 4, 25);
            var patternMatchDeviationData = new NyARMatchPattDeviationColorData(segmentX, segmentY);
            this.squareDetector = new NyARSquareContourDetector_Rle(cameraParameters.getScreenSize());
            this.squareDetectionListener = new SquareDetectionListener(patternMatchers, cameraParameters, colorPattern, patternMatchDeviationData);

            // Init buffer members 
            this.filteredBuffer = new NyARBinRaster(width, height);
            if (adaptive)
            {
                this.bufferFilter = new NyARRasterFilter_AdaptiveThreshold(bufferType);
            }
            else
            {
                this.bufferFilter = new NyARRasterFilter_ARToolkitThreshold(this.Threshold, bufferType);
            }
        }
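
Because this second variant reads the calibration data from the application's output directory instead of an embedded resource, the .dat file has to be deployed under Content/Data next to the assembly. The following is a hedged sketch of a start-up check a caller might add; the path layout is taken from the example above and should be adjusted to the actual project.

        // Sketch only: verify the calibration file was deployed before initializing the detector.
        var baseDirectory = System.IO.Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location);
        var calibrationFile = System.IO.Path.Combine(baseDirectory, "Content", "Data", "Camera_Calibration_1280x720.dat");
        if (!System.IO.File.Exists(calibrationFile))
        {
            throw new System.IO.FileNotFoundException("Camera calibration data was not copied to the output directory.", calibrationFile);
        }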