/// <summary>
/// Sets up the video capture device and the ALVAR marker tracker.
/// The capture backend (OpenCV or DirectShow) is selected by the
/// CapDeviceType setting (see App.config).
/// </summary>
private void SetupMarkerTracking()
{
    Console.WriteLine($"{DateTime.Now} Setup Video & AR\n");

    // Create our video capture device; the backend depends on CapDeviceType
    // ("look App.config").
    IVideoCapture captureDevice = (CapDeviceType == "OpenCV")
        ? (IVideoCapture)new OpenCVCapture()
        : new DirectShowCapture();

    captureDevice.InitVideoCapture(SelDevice, FrameRate._60Hz, Resolution._640x480,
        ImageFormat.R8G8B8_24, false);

    // Add this video capture device to the scene
    scene.AddVideoCaptureDevice(captureDevice);

    // Create an optical marker tracker that uses the ALVAR library.
    ALVARMarkerTracker tracker = new ALVARMarkerTracker();
    tracker.MaxMarkerError = 0.02f;
    // 32.4 is the physical marker size that goes with this calibration file.
    tracker.InitTracker(captureDevice.Width, captureDevice.Height, CalibFile, 32.4f);

    // Set the marker tracker to use for our scene
    scene.MarkerTracker = tracker;

    // Display the camera image in the background
    scene.ShowCameraImage = true;

    // Config gravity in camera coordinates: gravity points along -Z.
    scene.PhysicsEngine.GravityDirection = -Vector3.UnitZ;
}
/// <summary>
/// Sets up the video capture device (either a static image or a live
/// DirectShow camera) and the marker tracker (NyARToolkit or ALVAR,
/// selected at compile time via USE_NYARTOOLKIT).
/// </summary>
private void SetupMarkerTracking()
{
    // NyARToolkit consumes 32-bit BGRA frames; ALVAR consumes 24-bit RGB.
    // Hoisting the format here removes the duplicated InitVideoCapture calls
    // that previously differed only in this one argument.
#if USE_NYARTOOLKIT
    ImageFormat format = ImageFormat.B8G8R8A8_32;
#else
    ImageFormat format = ImageFormat.R8G8B8_24;
#endif

    IVideoCapture captureDevice;
    if (useStaticImage)
    {
        // Feed the tracker from a static image file instead of a live camera.
        NullCapture nullCapture = new NullCapture();
        nullCapture.InitVideoCapture(0, FrameRate._30Hz, Resolution._800x600,
            format, false);
        nullCapture.StaticImageFile = "MarkerImage";
        captureDevice = nullCapture;
    }
    else
    {
        // Create our video capture device that uses DirectShow library. Note that
        // the combinations of resolution and frame rate that are allowed depend on
        // the particular video capture device. Thus, setting incorrect resolution
        // and frame rate values may cause exceptions or simply be ignored, depending
        // on the device driver. The values set here will work for a Microsoft VX 6000,
        // and many other webcams.
        captureDevice = new DirectShowCapture2();
        captureDevice.InitVideoCapture(0, FrameRate._60Hz, Resolution._640x480,
            format, false);
    }

    // Add this video capture device to the scene so that it can be used for
    // the marker tracker
    scene.AddVideoCaptureDevice(captureDevice);

#if USE_NYARTOOLKIT
    NyARToolkitTracker tracker = new NyARToolkitTracker();
    tracker.InitTracker(captureDevice.Width, captureDevice.Height, "camera_para.dat");
#else
    ALVARMarkerTracker tracker = new ALVARMarkerTracker();
    tracker.MaxMarkerError = 0.02f;
    tracker.InitTracker(captureDevice.Width, captureDevice.Height, "calib.xml", 32.4f);
#endif

    // Set the marker tracker to use for our scene
    scene.MarkerTracker = tracker;

    // Display the camera image in the background. Note that this parameter should
    // be set after adding at least one video capture device to the Scene class.
    scene.ShowCameraImage = true;
}
/// <summary>
/// Sets up video capture (one camera, or two when a Wrap920AR stereo HMD is
/// detected), the ALVAR marker tracker with stereo projection matrices, and
/// the ground marker array node.
/// </summary>
private void SetupMarkerTracking()
{
    DirectShowCapture2 captureDevice = new DirectShowCapture2();
    captureDevice.InitVideoCapture(0, FrameRate._30Hz, Resolution._640x480,
        ImageFormat.R8G8B8_24, false);

    // Add this video capture device to the scene so that it can be used for
    // the marker tracker
    scene.AddVideoCaptureDevice(captureDevice);

    // if we're using Wrap920AR, then we need to add another capture device for
    // processing stereo camera
    bool isWrap920 = (iTracker.ProductID == iWearDllBridge.IWRProductID.IWR_PROD_WRAP920);
    if (isWrap920)
    {
        DirectShowCapture2 captureDevice2 = new DirectShowCapture2();
        captureDevice2.InitVideoCapture(1, FrameRate._30Hz, Resolution._640x480,
            ImageFormat.R8G8B8_24, false);
        scene.AddVideoCaptureDevice(captureDevice2);

        // Calculate the right projection matrix using the camera intrinsic
        // parameters for the right camera
        ((StereoCamera)scene.CameraNode.Camera).RightProjection =
            ALVARDllBridge.GetCameraProjection("Wrap920_1_Right.xml",
                captureDevice2.Width, captureDevice2.Height, 0.1f, 1000);
    }

    // Create an optical marker tracker that uses ALVAR library
    ALVARMarkerTracker tracker = new ALVARMarkerTracker();
    tracker.MaxMarkerError = 0.02f;
    tracker.ZNearPlane = 0.1f;
    tracker.ZFarPlane = 1000;
    tracker.InitTracker(captureDevice.Width, captureDevice.Height,
        "Wrap920_1_Left.xml", markerSize);
    ((StereoCamera)scene.CameraNode.Camera).LeftProjection = tracker.CameraProjection;

    scene.MarkerTracker = tracker;

    if (isWrap920)
    {
        // Stereo rig: left eye and tracker read device 0, right eye device 1.
        scene.LeftEyeVideoID = 0;
        scene.RightEyeVideoID = 1;
        scene.TrackerVideoID = 0;
    }
    else
    {
        // Single camera: both eyes and the tracker share video device 0.
        scene.LeftEyeVideoID = 0;
        scene.RightEyeVideoID = 0;
        scene.TrackerVideoID = 0;
    }

    // Create a marker node to track a ground marker array.
    groundMarkerNode = new MarkerNode(scene.MarkerTracker, "ALVARGroundArray.xml");
    scene.RootNode.AddChild(groundMarkerNode);

    scene.ShowCameraImage = true;
}
/// <summary>
/// Sets up video capture (a second device is added for a Wrap920AR stereo
/// HMD), the marker tracker (ARTag or ALVAR, selected at compile time via
/// USE_ARTAG), and the ground marker array node.
/// </summary>
private void SetupMarkerTracking()
{
    DirectShowCapture2 captureDevice = new DirectShowCapture2();
    captureDevice.InitVideoCapture(0, FrameRate._30Hz, Resolution._640x480,
        ImageFormat.R8G8B8_24, false);

    // Add this video capture device to the scene so that it can be used for
    // the marker tracker
    scene.AddVideoCaptureDevice(captureDevice);

    // if we're using Wrap920AR, then we need to add another capture device for
    // processing stereo camera
    if (iTracker.ProductID == iWearDllBridge.IWRProductID.IWR_PROD_WRAP920)
    {
        DirectShowCapture2 captureDevice2 = new DirectShowCapture2();
        captureDevice2.InitVideoCapture(1, FrameRate._30Hz, Resolution._640x480,
            ImageFormat.R8G8B8_24, false);
        scene.AddVideoCaptureDevice(captureDevice2);
    }

    IMarkerTracker tracker = null;
#if USE_ARTAG
    // Create an optical marker tracker that uses ARTag library
    tracker = new ARTagTracker();
    // Set the configuration file to look for the marker specifications
    tracker.InitTracker(638.052f, 633.673f, captureDevice.Width,
        captureDevice.Height, false, "ARTag.cf");
#else
    // Create an optical marker tracker that uses ALVAR library
    tracker = new ALVARMarkerTracker();
    ((ALVARMarkerTracker)tracker).MaxMarkerError = 0.02f;
    tracker.InitTracker(captureDevice.Width, captureDevice.Height, "calib.xml", 9.0);
#endif

    scene.MarkerTracker = tracker;

    if (iTracker.ProductID == iWearDllBridge.IWRProductID.IWR_PROD_WRAP920)
    {
        // Stereo rig: left eye and tracker read device 0, right eye device 1.
        scene.LeftEyeVideoID = 0;
        scene.RightEyeVideoID = 1;
        scene.TrackerVideoID = 0;
    }

    // Create a marker node to track a ground marker array.
#if USE_ARTAG
    groundMarkerNode = new MarkerNode(scene.MarkerTracker, "ground");
    scene.RootNode.AddChild(groundMarkerNode);
#else
    // Create an array to hold a list of marker IDs that are used in the marker
    // array configuration (even though these are already specified in the configuration
    // file, ALVAR still requires this array)
    int[] ids = new int[28];
    for (int i = 0; i < ids.Length; i++)
        ids[i] = i;

    groundMarkerNode = new MarkerNode(scene.MarkerTracker, "ALVARGroundArray.txt", ids);
    scene.RootNode.AddChild(groundMarkerNode);
#endif

    scene.ShowCameraImage = true;
}
/// <summary>
/// Initializes video capture and ALVAR marker tracking, configures physics
/// gravity, and attaches the ground-plane marker node to the scene graph.
/// </summary>
private void SetupMarkerTracking()
{
    // Capture frames from the first DirectShow camera at 640x480 / 30 Hz.
    DirectShowCapture capture = new DirectShowCapture();
    capture.InitVideoCapture(0, FrameRate._30Hz, Resolution._640x480,
        ImageFormat.R8G8B8_24, false);
    scene.AddVideoCaptureDevice(capture);

    // Track markers with the ALVAR library.
    ALVARMarkerTracker alvarTracker = new ALVARMarkerTracker();
    alvarTracker.MaxMarkerError = 0.02f;
    alvarTracker.InitTracker(capture.Width, capture.Height, "calib.xml", 9.0);
    scene.MarkerTracker = alvarTracker;

    // Show the live camera feed behind the rendered scene.
    scene.ShowCameraImage = true;

    // Gravity acts along the negative Z axis of the camera/marker space.
    scene.PhysicsEngine.GravityDirection = -Vector3.UnitZ;

    // Create a marker node to track the ground plane
    markerNode = new MarkerNode(scene.MarkerTracker, "ARDominoALVAR.txt");
    scene.RootNode.AddChild(markerNode);
}
//Markers Functions
/// <summary>
/// Setup the marker tracking capture devices
/// </summary>
private void SetupMarkerTracking()
{
    IVideoCapture captureDevice = null;
    try
    {
        captureDevice = new DirectShowCapture2();
        captureDevice.InitVideoCapture(0, FrameRate._30Hz, Resolution._640x480,
            ImageFormat.R8G8B8_24, false);
    }
    catch
    {
        Console.WriteLine("Error: No Camera detected");
        // Exit() only requests shutdown and returns, so bail out here —
        // otherwise the code below dereferences the failed capture device.
        Exit();
        return;
    }

    // Add this video capture device to the scene so that it can be used for
    // the marker tracker
    try
    {
        scene.AddVideoCaptureDevice(captureDevice);
    }
    catch (Exception ex)
    {
        // Best effort as before, but don't swallow the failure silently.
        Console.WriteLine("Warning: could not add capture device to scene: " + ex.Message);
    }

    // Create an optical marker tracker that uses ALVAR library
    ALVARMarkerTracker tracker = new ALVARMarkerTracker();
    tracker.MaxMarkerError = 0.02f;
    tracker.InitTracker(captureDevice.Width, captureDevice.Height, "calib.xml", 9.0);

    // Set the marker tracker to use for our scene
    scene.MarkerTracker = tracker;

    // Display the camera image in the background. Note that this parameter should
    // be set after adding at least one video capture device to the Scene class.
    scene.ShowCameraImage = true;
}
/// <summary>
/// Sets up stereo calibration: two capture devices (left/right eye), native
/// image buffers for frame grabbing, an ALVAR tracker calibrated for the left
/// camera, and a second ALVAR detector/camera for the right camera.
/// NOTE(review): the ALVARDllBridge calls below are order-sensitive native
/// interop — do not reorder.
/// </summary>
private void SetupCalibration()
{
    leftCaptureDevice = new DirectShowCapture2();
    leftCaptureDevice.InitVideoCapture(leftDeviceID, FrameRate._30Hz, Resolution._640x480,
        ImageFormat.R8G8B8_24, false);

    // Add left video capture device to the scene for rendering left eye image
    scene.AddVideoCaptureDevice(leftCaptureDevice);

    rightCaptureDevice = new DirectShowCapture2();
    rightCaptureDevice.InitVideoCapture(rightDeviceID, FrameRate._30Hz, Resolution._640x480,
        ImageFormat.R8G8B8_24, false);

    // Add right video capture device to the scene for rendering right eye image
    scene.AddVideoCaptureDevice(rightCaptureDevice);

    // Create holders for retrieving the captured video images
    // (3 bytes per pixel matches the 24-bit R8G8B8 format chosen above)
    leftImagePtr = Marshal.AllocHGlobal(leftCaptureDevice.Width * leftCaptureDevice.Height * 3);
    rightImagePtr = Marshal.AllocHGlobal(rightCaptureDevice.Width * rightCaptureDevice.Height * 3);

    // Associate each video devices to each eye
    scene.LeftEyeVideoID = 0;
    scene.RightEyeVideoID = 1;
    scene.ShowCameraImage = true;

    // Physical size of the calibration markers.
    float markerSize = 32.4f;

    // Initialize a marker tracker for tracking an marker array used for calibration
    markerTracker = new ALVARMarkerTracker();
    markerTracker.MaxMarkerError = 0.02f;
    markerTracker.ZNearPlane = 0.1f;
    markerTracker.ZFarPlane = 1000;
    markerTracker.InitTracker(leftCaptureDevice.Width, leftCaptureDevice.Height,
        LEFT_CALIB, markerSize);
    // The tracker's projection (from the left calibration file) drives the left eye.
    ((StereoCamera)scene.CameraNode.Camera).LeftProjection = markerTracker.CameraProjection;

    // Add another marker detector for tracking right video capture device
    ALVARDllBridge.alvar_add_marker_detector(markerSize, 5, 2);
    ALVARDllBridge.alvar_add_camera(RIGHT_CALIB, rightCaptureDevice.Width, rightCaptureDevice.Height);
    double[] projMat = new double[16];
    double cameraFovX = 0, cameraFovY = 0;
    // NOTE(review): camera index 1 presumably refers to the right camera added
    // just above, and the trailing 1000 / 0.1f presumably mirror the far/near
    // clip planes used by the tracker — confirm against ALVARDllBridge.
    ALVARDllBridge.alvar_get_camera_params(1, projMat, ref cameraFovX, ref cameraFovY, 1000, 0.1f);
    // Build the right-eye projection matrix from the 16 values returned by ALVAR.
    ((StereoCamera)scene.CameraNode.Camera).RightProjection = new Matrix(
        (float)projMat[0], (float)projMat[1], (float)projMat[2], (float)projMat[3],
        (float)projMat[4], (float)projMat[5], (float)projMat[6], (float)projMat[7],
        (float)projMat[8], (float)projMat[9], (float)projMat[10], (float)projMat[11],
        (float)projMat[12], (float)projMat[13], (float)projMat[14], (float)projMat[15]);

    // Add a marker array to be tracked
    markerID = markerTracker.AssociateMarker("ALVARGroundArray.xml");

    relativeTransforms = new List<Matrix>();
}
/// <summary>
/// Configures video capture and ALVAR marker tracking, then attaches the
/// ground and toolbar marker-array nodes to the scene graph.
/// </summary>
private void SetupMarkerTracking()
{
    // Grab frames from the first DirectShow camera at 640x480 / 60 Hz.
    DirectShowCapture capture = new DirectShowCapture();
    capture.InitVideoCapture(0, FrameRate._60Hz, Resolution._640x480,
        ImageFormat.R8G8B8_24, false);
    scene.AddVideoCaptureDevice(capture);

    // Use ALVAR marker tracker
    ALVARMarkerTracker alvar = new ALVARMarkerTracker();
    alvar.MaxMarkerError = 0.02f;
    alvar.InitTracker(capture.Width, capture.Height, "calib.xml", markerSize);
    scene.MarkerTracker = alvar;

    // Render the live camera image behind the 3D content.
    scene.ShowCameraImage = true;

    // One node per tracked marker array: the ground board and the toolbar.
    groundMarkerNode = new MarkerNode(scene.MarkerTracker, "ALVARGroundArray.xml");
    toolbarMarkerNode = new MarkerNode(scene.MarkerTracker, "ALVARToolbar.xml");
    scene.RootNode.AddChild(groundMarkerNode);
    scene.RootNode.AddChild(toolbarMarkerNode);
}
/// <summary>
/// Sets up the camera capture device and the ALVAR tracker (with clip planes
/// taken from the AR camera node), and adds the ground marker array node.
/// </summary>
private void SetupMarkerTracking()
{
    // 640x480 @ 60 Hz from the first DirectShow device.
    DirectShowCapture2 capture = new DirectShowCapture2();
    capture.InitVideoCapture(0, FrameRate._60Hz, Resolution._640x480,
        ImageFormat.R8G8B8_24, false);
    scene.AddVideoCaptureDevice(capture);

    // Use ALVAR marker tracker; keep its clip planes in sync with the AR camera.
    ALVARMarkerTracker alvar = new ALVARMarkerTracker();
    alvar.MaxMarkerError = 0.02f;
    alvar.ZNearPlane = arCameraNode.Camera.ZNearPlane;
    alvar.ZFarPlane = arCameraNode.Camera.ZFarPlane;
    alvar.InitTracker(capture.Width, capture.Height, "calib.xml", 32.4f);
    scene.MarkerTracker = alvar;

    // Display the camera image in the background. This must be set after at
    // least one video capture device has been added to the Scene class.
    scene.ShowCameraImage = true;

    // Track the ground marker array.
    groundMarkerNode = new MarkerNode(scene.MarkerTracker, "ALVARGroundArray.xml");
    scene.RootNode.AddChild(groundMarkerNode);
}