/// <summary>
/// Configures ALVAR-based optical marker tracking: creates a DirectShow video
/// capture device, initializes the marker tracker from the camera calibration
/// file, and attaches a ground-marker-array node to the scene root.
/// </summary>
private void SetupMarkerTracking()
{
    // Capture from the default webcam (device 0) at 640x480 / 60 Hz, 24-bit RGB.
    DirectShowCapture webcam = new DirectShowCapture();
    webcam.InitVideoCapture(0, FrameRate._60Hz, Resolution._640x480,
        ImageFormat.R8G8B8_24, false);
    scene.AddVideoCaptureDevice(webcam);

    // Use the ALVAR library for marker tracking. The near/far clip planes are
    // mirrored from the AR camera so the tracker projection matches rendering.
    ALVARMarkerTracker markerTracker = new ALVARMarkerTracker
    {
        MaxMarkerError = 0.02f,
        ZNearPlane = arCameraNode.Camera.ZNearPlane,
        ZFarPlane = arCameraNode.Camera.ZFarPlane
    };
    markerTracker.InitTracker(webcam.Width, webcam.Height, "default_calib.xml", 32.4f);

    // Set the marker tracker to use for our scene.
    scene.MarkerTracker = markerTracker;

    // Render the live camera image behind the 3D content. Note that this
    // property must be set only after at least one video capture device has
    // been added to the Scene class.
    scene.ShowCameraImage = true;

    // Track the ground marker array and hang its node off the scene root.
    groundMarkerNode = new MarkerNode(scene.MarkerTracker, "ALVARGroundArray.xml");
    scene.RootNode.AddChild(groundMarkerNode);
}
/// <summary>
/// Configures ALVAR-based natural feature tracking: creates a DirectShow video
/// capture device, initializes the feature tracker from the camera calibration
/// file, and enables the camera-image background.
/// </summary>
private void SetupFeatureTracking()
{
    // Create our video capture device that uses the DirectShow library. The
    // allowed resolution / frame-rate combinations depend on the particular
    // device: incorrect values may cause exceptions or simply be ignored,
    // depending on the driver. These values work for a Microsoft VX 6000 and
    // many other webcams.
    DirectShowCapture webcam = new DirectShowCapture();
    webcam.InitVideoCapture(0, FrameRate._60Hz, Resolution._640x480,
        ImageFormat.R8G8B8_24, false);

    // Register the capture device with the scene so the tracker can consume
    // its frames.
    scene.AddVideoCaptureDevice(webcam);

    // Create a feature tracker backed by the ALVAR library and initialize it
    // against the capture dimensions and calibration file.
    ALVARFeatureTracker featureTracker = new ALVARFeatureTracker();
    featureTracker.InitTracker(webcam.Width, webcam.Height, "default_calib.xml");

    // Set the feature tracker to use for our scene.
    scene.MarkerTracker = featureTracker;

    // Render the live camera image behind the 3D content. Note that this
    // property must be set only after at least one video capture device has
    // been added to the Scene class.
    scene.ShowCameraImage = true;
}
/// <summary>
/// Configures ALVAR-based marker tracking, sourcing frames either from a
/// static image file (when <c>useStaticImage</c> is set) or from a live
/// DirectShow webcam.
/// </summary>
private void SetupMarkerTracking()
{
    IVideoCapture capture;

    if (useStaticImage)
    {
        // Feed the tracker a fixed image instead of a live camera.
        NullCapture staticCapture = new NullCapture();
        staticCapture.InitVideoCapture(0, FrameRate._30Hz, Resolution._800x600,
            ImageFormat.R8G8B8_24, false);
        staticCapture.StaticImageFile = "MarkerImage";
        capture = staticCapture;
    }
    else
    {
        // Create our video capture device that uses the DirectShow library.
        // The allowed resolution / frame-rate combinations depend on the
        // particular device: incorrect values may cause exceptions or simply
        // be ignored, depending on the driver. These values work for a
        // Microsoft VX 6000 and many other webcams.
        capture = new DirectShowCapture();
        capture.InitVideoCapture(0, FrameRate._60Hz, Resolution._640x480,
            ImageFormat.R8G8B8_24, false);
    }

    // Register the capture device with the scene so the marker tracker can
    // consume its frames.
    scene.AddVideoCaptureDevice(capture);

    ALVARMarkerTracker markerTracker = new ALVARMarkerTracker
    {
        MaxMarkerError = 0.02f
    };
    markerTracker.InitTracker(capture.Width, capture.Height, "calib.xml", 32.4f);

    // Set the marker tracker to use for our scene.
    scene.MarkerTracker = markerTracker;

    // Render the live camera image behind the 3D content. Note that this
    // property must be set only after at least one video capture device has
    // been added to the Scene class.
    scene.ShowCameraImage = true;
}
/// <summary>
/// Configures ALVAR marker tracking for an iWear head-mounted display.
/// For a Wrap920AR a second capture device is added and the stereo camera's
/// left/right projections are derived from the per-eye calibration files;
/// otherwise both eyes share the single (left) camera feed.
/// </summary>
private void SetupMarkerTracking()
{
    // Left-eye (primary) camera: device 0 at 640x480 / 30 Hz, 24-bit RGB.
    DirectShowCapture captureDevice = new DirectShowCapture();
    captureDevice.InitVideoCapture(0, FrameRate._30Hz, Resolution._640x480,
        ImageFormat.R8G8B8_24, false);

    // Add this video capture device to the scene so that it can be used for
    // the marker tracker.
    scene.AddVideoCaptureDevice(captureDevice);

    // If we're using a Wrap920AR, we need a second capture device for the
    // right-eye camera of the stereo pair.
    if (iTracker.ProductID == iWearDllBridge.IWRProductID.IWR_PROD_WRAP920)
    {
        DirectShowCapture captureDevice2 = new DirectShowCapture();
        captureDevice2.InitVideoCapture(1, FrameRate._30Hz, Resolution._640x480,
            ImageFormat.R8G8B8_24, false);
        scene.AddVideoCaptureDevice(captureDevice2);

        // Calculate the right-eye projection matrix from the right camera's
        // intrinsic parameters.
        ((StereoCamera)scene.CameraNode.Camera).RightProjection =
            ALVARDllBridge.GetCameraProjection("Wrap920_Right.xml",
                captureDevice2.Width, captureDevice2.Height, 0.1f, 1000);
    }

    // Create an optical marker tracker that uses the ALVAR library, calibrated
    // against the left camera.
    ALVARMarkerTracker tracker = new ALVARMarkerTracker();
    tracker.MaxMarkerError = 0.02f;
    tracker.ZNearPlane = 0.1f;
    tracker.ZFarPlane = 1000;
    tracker.InitTracker(captureDevice.Width, captureDevice.Height,
        "Wrap920_Left.xml", markerSize);

    // The left-eye projection comes from the tracker's own calibrated camera.
    ((StereoCamera)scene.CameraNode.Camera).LeftProjection = tracker.CameraProjection;

    scene.MarkerTracker = tracker;

    // Route video feeds: with a Wrap920AR each eye gets its own camera;
    // otherwise both eyes (and the tracker) share video 0.
    if (iTracker.ProductID == iWearDllBridge.IWRProductID.IWR_PROD_WRAP920)
    {
        scene.LeftEyeVideoID = 0;
        scene.RightEyeVideoID = 1;
        scene.TrackerVideoID = 0;
    }
    else
    {
        scene.LeftEyeVideoID = 0;
        scene.RightEyeVideoID = 0;
        scene.TrackerVideoID = 0;
    }

    // Create a marker node to track a ground marker array.
    groundMarkerNode = new MarkerNode(scene.MarkerTracker, "ALVARGroundArray.xml");

    // NOTE(review): the original code constructed a TransformNode here
    // ("to translate the objects to be centered around the marker board")
    // but never attached it to the scene graph or parented anything under
    // it — removed as dead code. Reintroduce it under groundMarkerNode if
    // content needs to be offset relative to the marker board.
    scene.RootNode.AddChild(groundMarkerNode);

    // Render the live camera image behind the 3D content. Must be set after
    // at least one video capture device has been added to the scene.
    scene.ShowCameraImage = true;
}