Example #1
        private void SetupMarkerTracking()
        {
            DirectShowCapture captureDevice = new DirectShowCapture();

            captureDevice.InitVideoCapture(0, FrameRate._60Hz, Resolution._640x480,
                                           ImageFormat.R8G8B8_24, false);

            scene.AddVideoCaptureDevice(captureDevice);

            // Use ALVAR marker tracker
            ALVARMarkerTracker tracker = new ALVARMarkerTracker();

            tracker.MaxMarkerError = 0.02f;
            tracker.ZNearPlane     = arCameraNode.Camera.ZNearPlane;
            tracker.ZFarPlane      = arCameraNode.Camera.ZFarPlane;

            tracker.InitTracker(captureDevice.Width, captureDevice.Height, "default_calib.xml", 32.4f);

            // Set the marker tracker to use for our scene
            scene.MarkerTracker = tracker;

            // Display the camera image in the background. Note that this parameter should
            // be set after adding at least one video capture device to the Scene class.
            scene.ShowCameraImage = true;

            // Create a marker node to track a ground marker array.
            groundMarkerNode = new MarkerNode(scene.MarkerTracker, "ALVARGroundArray.xml");
            scene.RootNode.AddChild(groundMarkerNode);
        }
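Once SetupMarkerTracking() has run, content is typically hung under the marker node so that it renders relative to the tracked array. The sketch below is a minimal illustration using only node types that appear in these examples; the Translation property and the offset value are assumptions about GoblinXNA's TransformNode, not taken from the code above.

        private void AttachContentToGroundMarker()
        {
            // Offset child content relative to the tracked marker array.
            // (Translation is assumed to be a Vector3 property of TransformNode;
            // the value here is purely illustrative.)
            TransformNode contentTransform = new TransformNode();
            contentTransform.Translation = new Microsoft.Xna.Framework.Vector3(0, 0, 10);

            // Anything attached under groundMarkerNode inherits the marker's pose,
            // so it appears anchored to the physical marker board.
            groundMarkerNode.AddChild(contentTransform);
        }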
Example #2
        private void SetupCalibration()
        {
            leftCaptureDevice = new DirectShowCapture2();
            leftCaptureDevice.InitVideoCapture(leftDeviceID, FrameRate._30Hz, Resolution._640x480,
                                               ImageFormat.R8G8B8_24, false);

            // Add left video capture device to the scene for rendering left eye image
            scene.AddVideoCaptureDevice(leftCaptureDevice);

            rightCaptureDevice = new DirectShowCapture2();
            rightCaptureDevice.InitVideoCapture(rightDeviceID, FrameRate._30Hz, Resolution._640x480,
                                                ImageFormat.R8G8B8_24, false);

            // Add right video capture device to the scene for rendering right eye image
            scene.AddVideoCaptureDevice(rightCaptureDevice);

            // Allocate unmanaged buffers to hold the captured video images
            // (3 bytes per pixel for the 24-bit R8G8B8 format)
            leftImagePtr  = Marshal.AllocHGlobal(leftCaptureDevice.Width * leftCaptureDevice.Height * 3);
            rightImagePtr = Marshal.AllocHGlobal(rightCaptureDevice.Width * rightCaptureDevice.Height * 3);

            // Associate each video capture device with its corresponding eye
            scene.LeftEyeVideoID  = 0;
            scene.RightEyeVideoID = 1;

            scene.ShowCameraImage = true;

            float markerSize = 32.4f;

            // Initialize a marker tracker for tracking a marker array used for calibration
            markerTracker = new ALVARMarkerTracker();
            markerTracker.MaxMarkerError = 0.02f;
            markerTracker.ZNearPlane     = 0.1f;
            markerTracker.ZFarPlane      = 1000;
            markerTracker.InitTracker(leftCaptureDevice.Width, leftCaptureDevice.Height, LEFT_CALIB, markerSize);
            ((StereoCamera)scene.CameraNode.Camera).LeftProjection = markerTracker.CameraProjection;

            // Add another marker detector for tracking the right video capture device
            ALVARDllBridge.alvar_add_marker_detector(markerSize, 5, 2);

            ALVARDllBridge.alvar_add_camera(RIGHT_CALIB, rightCaptureDevice.Width, rightCaptureDevice.Height);
            double[] projMat = new double[16];
            double   cameraFovX = 0, cameraFovY = 0;

            ALVARDllBridge.alvar_get_camera_params(1, projMat, ref cameraFovX, ref cameraFovY, 1000, 0.1f);
            ((StereoCamera)scene.CameraNode.Camera).RightProjection = new Matrix(
                (float)projMat[0], (float)projMat[1], (float)projMat[2], (float)projMat[3],
                (float)projMat[4], (float)projMat[5], (float)projMat[6], (float)projMat[7],
                (float)projMat[8], (float)projMat[9], (float)projMat[10], (float)projMat[11],
                (float)projMat[12], (float)projMat[13], (float)projMat[14], (float)projMat[15]);

            // Add a marker array to be tracked
            markerID = markerTracker.AssociateMarker("ALVARGroundArray.xml");

            relativeTransforms = new List<Matrix>();
        }
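Since leftImagePtr and rightImagePtr are allocated from unmanaged memory with Marshal.AllocHGlobal, they are not garbage collected and should be released explicitly on shutdown. A minimal cleanup sketch follows; the method name is hypothetical, and it assumes the same using System.Runtime.InteropServices; directive the allocation above already relies on.

        private void CleanUpCalibration()
        {
            // Release the unmanaged image buffers allocated in SetupCalibration().
            if (leftImagePtr != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(leftImagePtr);
                leftImagePtr = IntPtr.Zero;
            }

            if (rightImagePtr != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(rightImagePtr);
                rightImagePtr = IntPtr.Zero;
            }
        }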
Example #3
        private void SetupMarkerTracking()
        {
            IVideoCapture captureDevice = null;

            if (useStaticImage)
            {
                captureDevice = new NullCapture();
                captureDevice.InitVideoCapture(1, FrameRate._30Hz, Resolution._800x600,
                                               ImageFormat.R8G8B8_24, false);
                ((NullCapture)captureDevice).StaticImageFile = "MarkerImage";
            }
            else
            {
                // Create our video capture device that uses DirectShow library. Note that
                // the combinations of resolution and frame rate that are allowed depend on
                // the particular video capture device. Thus, setting incorrect resolution
                // and frame rate values may cause exceptions or simply be ignored, depending
                // on the device driver.  The values set here will work for a Microsoft VX 6000,
                // and many other webcams.
                captureDevice = new DirectShowCapture2();
                captureDevice.InitVideoCapture(0, FrameRate._30Hz, Resolution._640x480,
                                               ImageFormat.R8G8B8_24, false);
            }

            // Add this video capture device to the scene so that it can be used for
            // the marker tracker
            scene.AddVideoCaptureDevice(captureDevice);

            IMarkerTracker tracker = null;

            // Create an optical marker tracker that uses the ALVAR library
            tracker = new ALVARMarkerTracker();
            ((ALVARMarkerTracker)tracker).MaxMarkerError = 0.02f;
            tracker.InitTracker(captureDevice.Width, captureDevice.Height, "calib.xml", 9.0);

            // Set the marker tracker to use for our scene
            scene.MarkerTracker = tracker;

            // Display the camera image in the background. Note that this parameter should
            // be set after adding at least one video capture device to the Scene class.
            scene.ShowCameraImage = true;
        }
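The sketch below shows one way this method might be wired into initialization: a flag selects the static test image or a live camera, and a marker node is then attached so the tracker has something to report against. The MarkerNode usage mirrors Examples #1 and #4; the field and method names here are assumptions.

        // Set to true to run the tracker against the static "MarkerImage" file instead of a webcam.
        private bool useStaticImage = false;

        private void InitializeTracking()
        {
            SetupMarkerTracking();

            // Track the same ground marker array used in the other examples and add it to
            // the scene graph so child nodes follow the detected marker pose.
            MarkerNode groundMarkerNode = new MarkerNode(scene.MarkerTracker, "ALVARGroundArray.xml");
            scene.RootNode.AddChild(groundMarkerNode);
        }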
Example #4
        private void SetupMarkerTracking()
        {
            DirectShowCapture captureDevice = new DirectShowCapture();

            captureDevice.InitVideoCapture(0, FrameRate._30Hz, Resolution._640x480,
                                           ImageFormat.R8G8B8_24, false);

            // Add this video capture device to the scene so that it can be used for
            // the marker tracker
            scene.AddVideoCaptureDevice(captureDevice);

            // If we're using the Wrap 920AR, we need to add a second capture device for
            // the right camera of the stereo pair
            DirectShowCapture captureDevice2 = null;

            if (iTracker.ProductID == iWearDllBridge.IWRProductID.IWR_PROD_WRAP920)
            {
                captureDevice2 = new DirectShowCapture();
                captureDevice2.InitVideoCapture(1, FrameRate._30Hz, Resolution._640x480,
                                                ImageFormat.R8G8B8_24, false);

                scene.AddVideoCaptureDevice(captureDevice2);

                // Calculate the right eye's projection matrix from the right camera's
                // intrinsic parameters
                ((StereoCamera)scene.CameraNode.Camera).RightProjection =
                    ALVARDllBridge.GetCameraProjection("Wrap920_Right.xml", captureDevice2.Width,
                                                       captureDevice2.Height, 0.1f, 1000);
            }

            // Create an optical marker tracker that uses the ALVAR library
            ALVARMarkerTracker tracker = new ALVARMarkerTracker();

            tracker.MaxMarkerError = 0.02f;
            tracker.ZNearPlane     = 0.1f;
            tracker.ZFarPlane      = 1000;
            tracker.InitTracker(captureDevice.Width, captureDevice.Height, "Wrap920_Left.xml", markerSize);

            ((StereoCamera)scene.CameraNode.Camera).LeftProjection = tracker.CameraProjection;

            scene.MarkerTracker = tracker;

            if (iTracker.ProductID == iWearDllBridge.IWRProductID.IWR_PROD_WRAP920)
            {
                scene.LeftEyeVideoID  = 0;
                scene.RightEyeVideoID = 1;
                scene.TrackerVideoID  = 0;
            }
            else
            {
                scene.LeftEyeVideoID  = 0;
                scene.RightEyeVideoID = 0;
                scene.TrackerVideoID  = 0;
            }

            // Create a marker node to track a ground marker array.
            groundMarkerNode = new MarkerNode(scene.MarkerTracker, "ALVARGroundArray.xml");

            // Add a transform node to translate the objects so that they are centered
            // around the marker board. (The translation values depend on the marker
            // array's dimensions, so they are left at the default here.)
            TransformNode transNode = new TransformNode();

            scene.RootNode.AddChild(groundMarkerNode);
            groundMarkerNode.AddChild(transNode);

            scene.ShowCameraImage = true;
        }
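At run time, the marker node reports whether the ground array was detected in the current frame, which is useful for showing marker-anchored content only while tracking is available. The sketch below assumes MarkerNode exposes a MarkerFound property and that the containing class derives from XNA's Game, as in the GoblinXNA tutorials; the groundMarkerVisible field is illustrative.

        private bool groundMarkerVisible = false;

        protected override void Update(GameTime gameTime)
        {
            base.Update(gameTime);

            // MarkerFound is assumed to report whether the ground marker array was
            // detected in the most recent video frame.
            groundMarkerVisible = groundMarkerNode.MarkerFound;
        }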