private void Calibrate()
{
    // Grab the most recent frame from the capture device into the native buffer.
    captureDevice.GetImageTexture(null, ref imagePtr);

    // Frames are 3-channel RGB; the calibration board squares are 22.8 mm wide.
    const string pixelOrder = "RGB";
    const int channelCount = 3;
    const double squareSizeMm = 22.8; // in millimeters

    // Ask ALVAR to find the chessboard in this frame and accumulate it for calibration.
    bool accepted = ALVARDllBridge.alvar_calibrate_camera(
        cameraID, channelCount, pixelOrder, pixelOrder, imagePtr,
        squareSizeMm, ETALON_ROWS, ETALON_COLUMNS);

    if (accepted)
    {
        if (useImageSequence)
        {
            Notifier.AddMessage(((NullCapture)captureDevice).StaticImageFile + " succeeded");
            successCount++;
        }
        else
        {
            Notifier.AddMessage("Captured Image " + (captureCount + 1));
        }
        captureCount++;
    }
    else if (useImageSequence)
    {
        // Report failed detections only in image-sequence mode; live capture just retries.
        Notifier.AddMessage(((NullCapture)captureDevice).StaticImageFile + " failed");
    }
}
public void GenerateXML()
{
    // Make sure the native ALVAR library is initialized before querying it.
    ALVARDllBridge.alvar_init();

    // Convert the saved camera calibration into a projection matrix and
    // persist that matrix as an XML file.
    Matrix projectionMatrix = ALVARDllBridge.GetCameraProjection(
        calibrationFilename, width, height, zNear, zFar);
    MatrixHelper.SaveMatrixToXML(projectionFilename, projectionMatrix);
}
// Sets up the stereo calibration scene: opens both eye cameras, allocates native
// frame buffers, initializes the ALVAR marker tracker for the left camera, and
// derives both eye projection matrices. Order matters: the left tracker must be
// initialized (registering ALVAR camera 0) before the right camera is added as
// ALVAR camera index 1.
private void SetupCalibration()
{
    // Open the left-eye camera: 640x480 @ 30 Hz, 24-bit RGB frames.
    leftCaptureDevice = new DirectShowCapture2();
    leftCaptureDevice.InitVideoCapture(leftDeviceID, FrameRate._30Hz, Resolution._640x480,
        ImageFormat.R8G8B8_24, false);

    // Add left video capture device to the scene for rendering left eye image
    scene.AddVideoCaptureDevice(leftCaptureDevice);

    // Open the right-eye camera with identical settings.
    rightCaptureDevice = new DirectShowCapture2();
    rightCaptureDevice.InitVideoCapture(rightDeviceID, FrameRate._30Hz, Resolution._640x480,
        ImageFormat.R8G8B8_24, false);

    // Add right video capture device to the scene for rendering right eye image
    scene.AddVideoCaptureDevice(rightCaptureDevice);

    // Create holders for retrieving the captured video images
    // (3 bytes per pixel for the 24-bit RGB format chosen above).
    leftImagePtr = Marshal.AllocHGlobal(leftCaptureDevice.Width * leftCaptureDevice.Height * 3);
    rightImagePtr = Marshal.AllocHGlobal(rightCaptureDevice.Width * rightCaptureDevice.Height * 3);

    // Associate each video devices to each eye
    scene.LeftEyeVideoID = 0;
    scene.RightEyeVideoID = 1;
    scene.ShowCameraImage = true;

    // Marker array edge size -- presumably millimeters, to match the 22.8 mm
    // calibration squares used elsewhere in this file; TODO confirm units.
    float markerSize = 32.4f;

    // Initialize a marker tracker for tracking an marker array used for calibration
    markerTracker = new ALVARMarkerTracker();
    markerTracker.MaxMarkerError = 0.02f;
    markerTracker.ZNearPlane = 0.1f;
    markerTracker.ZFarPlane = 1000;
    markerTracker.InitTracker(leftCaptureDevice.Width, leftCaptureDevice.Height,
        LEFT_CALIB, markerSize);

    // The tracker (left camera) supplies the left-eye projection directly.
    ((StereoCamera)scene.CameraNode.Camera).LeftProjection = markerTracker.CameraProjection;

    // Add another marker detector for tracking right video capture device
    ALVARDllBridge.alvar_add_marker_detector(markerSize, 5, 2);
    ALVARDllBridge.alvar_add_camera(RIGHT_CALIB, rightCaptureDevice.Width, rightCaptureDevice.Height);

    // Fetch the right camera's (ALVAR index 1) projection parameters.
    // NOTE(review): the trailing arguments appear to be (far, near) -- the same
    // order used by InitTracker's zFarPlane/zNearPlane call -- confirm in the bridge.
    double[] projMat = new double[16];
    double cameraFovX = 0, cameraFovY = 0;
    ALVARDllBridge.alvar_get_camera_params(1, projMat, ref cameraFovX, ref cameraFovY, 1000, 0.1f);

    // Build the right-eye projection matrix from the 16 values returned by ALVAR.
    ((StereoCamera)scene.CameraNode.Camera).RightProjection = new Matrix(
        (float)projMat[0], (float)projMat[1], (float)projMat[2], (float)projMat[3],
        (float)projMat[4], (float)projMat[5], (float)projMat[6], (float)projMat[7],
        (float)projMat[8], (float)projMat[9], (float)projMat[10], (float)projMat[11],
        (float)projMat[12], (float)projMat[13], (float)projMat[14], (float)projMat[15]);

    // Add a marker array to be tracked
    markerID = markerTracker.AssociateMarker("ALVARGroundArray.xml");

    relativeTransforms = new List<Matrix>();
}
/// <summary>
/// Processes the video image captured from an initialized video capture device.
/// </summary>
/// <param name="captureDevice">An initialized video capture device</param>
/// <param name="imagePtr">Pointer to the raw image bytes to be analyzed</param>
public void ProcessImage(IVideoCapture captureDevice, IntPtr imagePtr)
{
    // Translate the device's pixel format into the channel count and channel
    // ordering string that the native ALVAR feature detector expects.
    String channelSeq = "";
    int nChannles = 1;
    ImageFormat format = captureDevice.Format;

    if (format == ImageFormat.R5G6B5_16 || format == ImageFormat.R8G8B8_24)
    {
        channelSeq = "RGB";
        nChannles = 3;
    }
    else if (format == ImageFormat.R8G8B8A8_32)
    {
        channelSeq = "RGBA";
        nChannles = 4;
    }
    else if (format == ImageFormat.B8G8R8_24)
    {
        channelSeq = "BGR";
        nChannles = 3;
    }
    else if (format == ImageFormat.B8G8R8A8_32)
    {
        channelSeq = "BGRA";
        nChannles = 4;
    }
    else if (format == ImageFormat.A8B8G8R8_32)
    {
        // NOTE(review): "ARGB" does not match the A8B8G8R8 name; kept as-is
        // (it is consistent across this file) -- verify against the native
        // ALVAR wrapper before changing.
        channelSeq = "ARGB";
        nChannles = 4;
    }

    featureFound = ALVARDllBridge.alvar_detect_feature(0, nChannles, channelSeq, channelSeq,
        imagePtr, MinInlierRatio, MinMappedPoints, ref inlierRatio, ref mappedPoints);

    if (featureFound)
    {
        // A feature set matched; fetch its 4x4 pose and cache it as the last pose.
        ALVARDllBridge.alvar_get_feature_pose(poseMats);

        lastMarkerMatrix = new Matrix(
            (float)poseMats[0], (float)poseMats[1], (float)poseMats[2], (float)poseMats[3],
            (float)poseMats[4], (float)poseMats[5], (float)poseMats[6], (float)poseMats[7],
            (float)poseMats[8], (float)poseMats[9], (float)poseMats[10], (float)poseMats[11],
            (float)poseMats[12], (float)poseMats[13], (float)poseMats[14], (float)poseMats[15]);
    }
}
private void FinalizeCalibration()
{
    // Tell the user how much work is about to start.
    string progress = useImageSequence
        ? "Calibrating " + successCount + " images..."
        : "Calibrating...";
    Notifier.AddMessage(progress);

    // Run the native calibration over all accumulated frames and write the file.
    ALVARDllBridge.alvar_finalize_calibration(cameraID, calibrationFilename);

    // Keep the completion message on screen indefinitely.
    Notifier.FadeOutTime = -1;
    Notifier.AddMessage("Finished calibration. Saved " + calibrationFilename);
    finalized = true;
}
/// <summary>
/// Processes the video image captured from an initialized video capture device.
/// </summary>
/// <param name="captureDevice">An initialized video capture device</param>
/// <param name="imagePtr">Pointer to the raw image bytes to be analyzed</param>
public void ProcessImage(IVideoCapture captureDevice, IntPtr imagePtr)
{
    // Map the capture format onto ALVAR's channel count and ordering string.
    String colorOrder = "";
    int numChannels = 1;
    switch (captureDevice.Format)
    {
        case ImageFormat.R5G6B5_16:
        case ImageFormat.R8G8B8_24:
            numChannels = 3;
            colorOrder = "RGB";
            break;
        case ImageFormat.R8G8B8A8_32:
            numChannels = 4;
            colorOrder = "RGBA";
            break;
        case ImageFormat.B8G8R8_24:
            numChannels = 3;
            colorOrder = "BGR";
            break;
        case ImageFormat.B8G8R8A8_32:
            numChannels = 4;
            colorOrder = "BGRA";
            break;
        case ImageFormat.A8B8G8R8_32:
            // NOTE(review): "ARGB" does not match the A8B8G8R8 name; kept as-is
            // (consistent with the rest of the file) -- verify before changing.
            numChannels = 4;
            colorOrder = "ARGB";
            break;
    }

    // Run the native detector; it reports how many markers were found overall
    // and how many of the markers we registered interest in were seen.
    int interestedMarkerNums = singleMarkerIDs.Count;
    int foundMarkerNums = 0;
    ALVARDllBridge.alvar_detect_marker(
        detectorID, cameraID, numChannels, colorOrder, colorOrder, imagePtr,
        singleMarkerIDsPtr, ref foundMarkerNums, ref interestedMarkerNums,
        max_marker_error, max_track_error);

    // Convert the native detection results into managed pose matrices.
    Process(interestedMarkerNums, foundMarkerNums);
}
/// <summary>
/// Initializes the feature tracker with a set of configuration parameters.
/// </summary>
/// <param name="configs">
/// Pass exactly three parameters in the order
/// (int imageWidth, int imageHeight, String cameraCalibFilename).
/// </param>
/// <exception cref="MarkerException">
/// Thrown if the parameters are malformed or the camera calibration file is
/// missing or unspecified.
/// </exception>
public void InitTracker(params Object[] configs)
{
    // FIX: the previous XML doc described the 4/6-argument marker-tracker
    // overloads (including markerSize), but this tracker accepts exactly
    // (int imageWidth, int imageHeight, String cameraCalibFilename).
    if (configs.Length != 3)
    {
        throw new MarkerException(GetInitTrackerUsage());
    }

    int img_width = 0;
    int img_height = 0;
    try
    {
        img_width = (int)configs[0];
        img_height = (int)configs[1];
        configFilename = (String)configs[2];
    }
    catch (Exception)
    {
        // Any cast/conversion failure is reported as a usage error.
        throw new MarkerException(GetInitTrackerUsage());
    }

    ALVARDllBridge.alvar_init();

    int ret = ALVARDllBridge.alvar_add_camera(configFilename, img_width, img_height);
    if (ret < 0)
    {
        throw new MarkerException("Camera calibration file is either not specified or not found");
    }

    ALVARDllBridge.alvar_add_fern_estimator(configFilename, img_width, img_height);

    // Fetch the projection parameters for the camera just added.
    // NOTE(review): camera index 0 is assumed here instead of 'ret'; this only
    // holds if this is the first camera registered with ALVAR -- confirm.
    // (Argument order is zFar, zNear.)
    double[] projMat = new double[16];
    ALVARDllBridge.alvar_get_camera_params(0, projMat, ref cameraFovX, ref cameraFovY,
        zFarPlane, zNearPlane);

    // Build the camera projection matrix from the 16 values returned by ALVAR.
    camProjMat = new Matrix(
        (float)projMat[0], (float)projMat[1], (float)projMat[2], (float)projMat[3],
        (float)projMat[4], (float)projMat[5], (float)projMat[6], (float)projMat[7],
        (float)projMat[8], (float)projMat[9], (float)projMat[10], (float)projMat[11],
        (float)projMat[12], (float)projMat[13], (float)projMat[14], (float)projMat[15]);

    initialized = true;
}
private void SetupCalibration()
{
    // In image-sequence mode calibration runs over static files via a NullCapture;
    // otherwise grab live frames from the first DirectShow camera.
    if (useImageSequence)
    {
        captureDevice = new NullCapture();
    }
    else
    {
        captureDevice = new DirectShowCapture();
    }
    captureDevice.InitVideoCapture(0, FrameRate._30Hz, Resolution._640x480,
        ImageFormat.R8G8B8_24, false);

    if (useImageSequence)
    {
        imageNames = Directory.GetFiles(imageDirectory);
        if (imageNames != null && imageNames.Length > 0)
        {
            // Seed the null capture with the first image in the directory.
            ((NullCapture)captureDevice).StaticImageFile = imageNames[0];
        }
        else
        {
            MessageBox.Show("No images are found in " + imageDirectory +
                " for static image calibration");
            this.Exit();
        }
    }

    // Add this video capture device to the scene so that it can be used for
    // the marker tracker
    scene.AddVideoCaptureDevice(captureDevice);

    // Native buffer big enough for one 24-bit RGB frame (3 bytes per pixel).
    imagePtr = Marshal.AllocHGlobal(captureDevice.Width * captureDevice.Height * 3);

    scene.ShowCameraImage = true;

    // Initializes ALVAR camera
    ALVARDllBridge.alvar_init();
    ALVARDllBridge.alvar_add_camera(null, captureDevice.Width, captureDevice.Height);
}
/// <summary>
/// Associates a trained feature classifier with an identifier so that the
/// identifier can be used to find this feature set after processing the image.
/// </summary>
/// <param name="markerConfigs">
/// Takes exactly one parameter: (String classifierFilename), the name of the
/// trained classifier (.dat) file to load.
/// </param>
/// <returns>An identifier (the classifier filename) for this feature object</returns>
/// <exception cref="MarkerException">
/// Thrown if the tracker is uninitialized, the parameter is missing, empty, or
/// not a string, or the classifier file cannot be loaded.
/// </exception>
public Object AssociateMarker(params Object[] markerConfigs)
{
    // FIX: the previous XML doc (copied from the marker tracker) claimed three
    // parameter forms including (int markerID); this tracker only accepts a
    // single classifier filename string.

    // make sure we are initialized
    if (!initialized)
    {
        throw new MarkerException("ALVARFeatureTracker is not initialized. Call InitTracker(...)");
    }

    if (markerConfigs.Length != 1)
    {
        throw new MarkerException(GetAssocMarkerUsage());
    }

    Object id = null;

    if (markerConfigs[0] is string)
    {
        String classifierName = (String)markerConfigs[0];
        if (classifierName.Equals(""))
        {
            throw new MarkerException(GetAssocMarkerUsage());
        }
        else
        {
            // The native call returns non-zero when the .dat file is missing or invalid.
            if (ALVARDllBridge.alvar_add_feature_detector(classifierName) != 0)
            {
                throw new MarkerException(classifierName +
                    " is not found or not an appropriate .dat file");
            }
            id = classifierName;
        }
    }
    else
    {
        throw new MarkerException(GetAssocMarkerUsage());
    }

    return (id);
}
// Sets up marker tracking: opens the primary (tracker) camera, optionally a second
// camera for the Wrap920AR stereo display, configures the ALVAR marker tracker,
// routes the video streams to each eye, and attaches the ground marker node.
private void SetupMarkerTracking()
{
    // Primary (left/tracker) camera: device 0, 640x480 @ 30 Hz, 24-bit RGB.
    DirectShowCapture captureDevice = new DirectShowCapture();
    captureDevice.InitVideoCapture(0, FrameRate._30Hz, Resolution._640x480,
        ImageFormat.R8G8B8_24, false);

    // Add this video capture device to the scene so that it can be used for
    // the marker tracker
    scene.AddVideoCaptureDevice(captureDevice);

    // if we're using Wrap920AR, then we need to add another capture device for
    // processing stereo camera
    DirectShowCapture captureDevice2 = null;
    if (iTracker.ProductID == iWearDllBridge.IWRProductID.IWR_PROD_WRAP920)
    {
        captureDevice2 = new DirectShowCapture();
        captureDevice2.InitVideoCapture(1, FrameRate._30Hz, Resolution._640x480,
            ImageFormat.R8G8B8_24, false);
        scene.AddVideoCaptureDevice(captureDevice2);

        // Calculate the right projection matrix using the camera intrinsic
        // parameters for the right camera
        ((StereoCamera)scene.CameraNode.Camera).RightProjection =
            ALVARDllBridge.GetCameraProjection("Wrap920_Right.xml",
                captureDevice2.Width, captureDevice2.Height, 0.1f, 1000);
    }

    // Create an optical marker tracker that uses ALVAR library
    ALVARMarkerTracker tracker = new ALVARMarkerTracker();
    tracker.MaxMarkerError = 0.02f;
    tracker.ZNearPlane = 0.1f;
    tracker.ZFarPlane = 1000;
    tracker.InitTracker(captureDevice.Width, captureDevice.Height, "Wrap920_Left.xml", markerSize);
    ((StereoCamera)scene.CameraNode.Camera).LeftProjection = tracker.CameraProjection;

    scene.MarkerTracker = tracker;

    // Route the video streams: with Wrap920AR the second camera feeds the right
    // eye; otherwise the single camera feeds both eyes. Tracking always uses
    // video stream 0.
    if (iTracker.ProductID == iWearDllBridge.IWRProductID.IWR_PROD_WRAP920)
    {
        scene.LeftEyeVideoID = 0;
        scene.RightEyeVideoID = 1;
        scene.TrackerVideoID = 0;
    }
    else
    {
        scene.LeftEyeVideoID = 0;
        scene.RightEyeVideoID = 0;
        scene.TrackerVideoID = 0;
    }

    // Create a marker node to track a ground marker array.
    groundMarkerNode = new MarkerNode(scene.MarkerTracker, "ALVARGroundArray.xml");
    scene.RootNode.AddChild(groundMarkerNode);

    // FIX: removed an unused TransformNode that was constructed but never
    // attached to the scene graph; the original comment promised a translation
    // to center objects around the marker board that was never applied. Objects
    // should be added under groundMarkerNode (via their own transforms) instead.

    scene.ShowCameraImage = true;
}
// Converts the native detection results of the most recent frame into managed
// pose matrices: fills 'detectedMarkers' with single-marker poses keyed by
// marker ID and 'detectedMultiMarkers' with multi-marker-config poses keyed by
// config name. Both maps are cleared on every call.
private void Process(int interestedMarkerNums, int foundMarkerNums)
{
    detectedMarkers.Clear();
    detectedMultiMarkers.Clear();

    // Nothing was detected at all in this frame.
    if (foundMarkerNums <= 0)
    {
        return;
    }

    int id = 0;

    if (interestedMarkerNums > 0)
    {
        // (Re)size the managed arrays and native buffers when the number of
        // interesting markers changes.
        if (prevMarkerNum != interestedMarkerNums)
        {
            ids = new int[interestedMarkerNums];
            poseMats = new double[interestedMarkerNums * 16];

            // FIX: release the previously allocated native buffers before
            // allocating new ones; the old code leaked unmanaged memory every
            // time the marker count changed. FreeHGlobal is a no-op for
            // IntPtr.Zero, so the first allocation is safe.
            Marshal.FreeHGlobal(idPtr);
            Marshal.FreeHGlobal(posePtr);
            idPtr = Marshal.AllocHGlobal(interestedMarkerNums * sizeof(int));
            posePtr = Marshal.AllocHGlobal(interestedMarkerNums * 16 * sizeof(double));
        }

        ALVARDllBridge.alvar_get_poses(detectorID, idPtr, posePtr);
        prevMarkerNum = interestedMarkerNums;

        // Copy the native results into the managed arrays.
        Marshal.Copy(idPtr, ids, 0, interestedMarkerNums);
        Marshal.Copy(posePtr, poseMats, 0, interestedMarkerNums * 16);

        for (int i = 0; i < interestedMarkerNums; i++)
        {
            id = ids[i];

            // If same marker ID exists, then we ignore the 2nd one
            if (!detectedMarkers.ContainsKey(id))
            {
                int index = i * 16;
                Matrix mat = new Matrix(
                    (float)poseMats[index], (float)poseMats[index + 1],
                    (float)poseMats[index + 2], (float)poseMats[index + 3],
                    (float)poseMats[index + 4], (float)poseMats[index + 5],
                    (float)poseMats[index + 6], (float)poseMats[index + 7],
                    (float)poseMats[index + 8], (float)poseMats[index + 9],
                    (float)poseMats[index + 10], (float)poseMats[index + 11],
                    (float)poseMats[index + 12], (float)poseMats[index + 13],
                    (float)poseMats[index + 14], (float)poseMats[index + 15]);
                detectedMarkers.Add(id, mat);
            }
        }
    }

    if (multiMarkerIDs.Count == 0)
    {
        return;
    }

    double error = -1;
    ALVARDllBridge.alvar_get_multi_marker_poses(detectorID, cameraID, detectAdditional,
        multiIdPtr, multiPosePtr, multiErrorPtr);

    Marshal.Copy(multiIdPtr, multiIDs, 0, multiMarkerIDs.Count);
    Marshal.Copy(multiPosePtr, multiPoseMats, 0, multiMarkerIDs.Count * 16);
    Marshal.Copy(multiErrorPtr, multiErrors, 0, multiMarkerIDs.Count);

    for (int i = 0; i < multiMarkerIDs.Count; i++)
    {
        id = multiIDs[i];
        error = multiErrors[i];

        // An error of -1 appears to mean this multi-marker config was not
        // detected in this frame -- skip it. (Semantics from the native side.)
        if (error == -1)
        {
            continue;
        }

        int index = i * 16;
        Matrix mat = new Matrix(
            (float)multiPoseMats[index], (float)multiPoseMats[index + 1],
            (float)multiPoseMats[index + 2], (float)multiPoseMats[index + 3],
            (float)multiPoseMats[index + 4], (float)multiPoseMats[index + 5],
            (float)multiPoseMats[index + 6], (float)multiPoseMats[index + 7],
            (float)multiPoseMats[index + 8], (float)multiPoseMats[index + 9],
            (float)multiPoseMats[index + 10], (float)multiPoseMats[index + 11],
            (float)multiPoseMats[index + 12], (float)multiPoseMats[index + 13],
            (float)multiPoseMats[index + 14], (float)multiPoseMats[index + 15]);
        detectedMultiMarkers.Add(multiMarkerIDs[i], mat);
    }
}
/// <summary>
/// Associates a marker with an identifier so that the identifier can be used to find this
/// marker after processing the image.
/// </summary>
/// <param name="markerConfigs">There are three ways to pass the parameters; (int markerID),
/// (int markerID, double markerSize), or (String multiMarkerConfig). </param>
/// <returns>An identifier for this marker object</returns>
/// <exception cref="MarkerException">
/// Thrown if the tracker is uninitialized or the parameters are malformed.
/// </exception>
public Object AssociateMarker(params Object[] markerConfigs)
{
    // make sure we are initialized
    if (!initialized)
    {
        throw new MarkerException("ALVARMarkerTracker is not initialized. Call InitTracker(...)");
    }

    if (!(markerConfigs.Length == 1 || markerConfigs.Length == 2))
    {
        throw new MarkerException(GetAssocMarkerUsage());
    }

    Object id = null;

    if (markerConfigs.Length == 1)
    {
        if (markerConfigs[0] is string)
        {
            // Single argument string: a multi-marker configuration file.
            String markerConfigName = (String)markerConfigs[0];
            if (markerConfigName.Equals(""))
            {
                throw new MarkerException(GetAssocMarkerUsage());
            }
            id = markerConfigName;
            RegisterMultiMarker(markerConfigName);
        }
        else if (markerConfigs[0] is int)
        {
            // Single argument int: a single marker ID with the detector's default size.
            id = markerConfigs[0];
            RegisterSingleMarker((int)markerConfigs[0]);
        }
        else
        {
            throw new MarkerException(GetAssocMarkerUsage());
        }
    }
    else
    {
        try
        {
            if (markerConfigs[0] is int)
            {
                // (int markerID, double markerSize): override this marker's size.
                id = markerConfigs[0];
                int markerID = (int)markerConfigs[0];
                double markerSize = Double.Parse(markerConfigs[1].ToString());
                ALVARDllBridge.alvar_set_marker_size(detectorID, markerID, markerSize);
                RegisterSingleMarker(markerID);
            }
            else
            {
                String markerConfigName = (String)markerConfigs[0];
                if (markerConfigName.Equals(""))
                {
                    throw new MarkerException(GetAssocMarkerUsage());
                }
                id = markerConfigName;
                RegisterMultiMarker(markerConfigName);
            }
        }
        catch (Exception)
        {
            // Preserves the original behavior: any failure in this branch
            // (including bad casts or parse errors) surfaces as a usage error.
            throw new MarkerException(GetAssocMarkerUsage());
        }
    }

    return (id);
}

// Registers a multi-marker configuration file with ALVAR and resizes the
// native/managed buffers used to read back multi-marker poses and errors.
private void RegisterMultiMarker(String markerConfigName)
{
    ALVARDllBridge.alvar_add_multi_marker(markerConfigName);
    multiMarkerIDs.Add(markerConfigName);
    multiMarkerID++;

    // FIX: free the previously allocated native buffers before reallocating;
    // the old code leaked unmanaged memory on every registration.
    // (FreeHGlobal is a no-op for IntPtr.Zero, so the first call is safe.)
    Marshal.FreeHGlobal(multiIdPtr);
    Marshal.FreeHGlobal(multiPosePtr);
    Marshal.FreeHGlobal(multiErrorPtr);
    multiIdPtr = Marshal.AllocHGlobal(multiMarkerIDs.Count * sizeof(int));
    multiPosePtr = Marshal.AllocHGlobal(multiMarkerIDs.Count * 16 * sizeof(double));
    multiErrorPtr = Marshal.AllocHGlobal(multiMarkerIDs.Count * sizeof(double));

    multiIDs = new int[multiMarkerIDs.Count];
    multiPoseMats = new double[multiMarkerIDs.Count * 16];
    multiErrors = new double[multiMarkerIDs.Count];
}

// Adds a single marker ID to the interest list and rebuilds the native copy of
// that list that is handed to alvar_detect_marker.
private void RegisterSingleMarker(int markerID)
{
    singleMarkerIDs.Add(markerID);

    // FIX: free the previous native buffer (the old code leaked it on every
    // registration), and use Marshal.Copy instead of an unsafe pointer loop.
    Marshal.FreeHGlobal(singleMarkerIDsPtr);
    singleMarkerIDsPtr = Marshal.AllocHGlobal(singleMarkerIDs.Count * sizeof(int));
    Marshal.Copy(singleMarkerIDs.ToArray(), 0, singleMarkerIDsPtr, singleMarkerIDs.Count);
}
/// <summary>
/// Initializes the marker tracker with a set of configuration parameters.
/// </summary>
/// <param name="configs">
/// There are two ways to pass the parameters. One way is to pass in the order of
/// (int imageWidth, int imageHeight, String cameraCalibFilename, double markerSize,
/// int markerRes, double margin), and the other way is (int imageWidth, int imageHeight,
/// String cameraCalibFilename, double markerSize).
/// </param>
/// <exception cref="MarkerException">
/// Thrown if the parameters are malformed or the camera calibration file is
/// missing or unspecified.
/// </exception>
public void InitTracker(params Object[] configs)
{
    if (!(configs.Length == 4 || configs.Length == 6))
    {
        throw new MarkerException(GetInitTrackerUsage());
    }

    // Defaults used by the short (4-argument) form.
    int markerRes = 5;
    double markerSize = 1, margin = 2;
    int img_width = 0;
    int img_height = 0;

    // FIX: the two overload branches duplicated the parsing of the first four
    // parameters; consolidated into a single try block with identical behavior
    // (any conversion failure still raises the usage MarkerException).
    try
    {
        img_width = (int)configs[0];
        img_height = (int)configs[1];
        configFilename = (String)configs[2];
        markerSize = Double.Parse(configs[3].ToString());
        if (configs.Length == 6)
        {
            markerRes = (int)configs[4];
            margin = Double.Parse(configs[5].ToString());
        }
    }
    catch (Exception)
    {
        throw new MarkerException(GetInitTrackerUsage());
    }

    ALVARDllBridge.alvar_init();

    int ret = ALVARDllBridge.alvar_add_camera(configFilename, img_width, img_height);
    if (ret < 0)
    {
        throw new MarkerException("Camera calibration file is either not specified or not found");
    }
    cameraID = ret;

    // Fetch the projection parameters for the camera just added.
    // (Argument order is zFar, zNear.)
    double[] projMat = new double[16];
    ALVARDllBridge.alvar_get_camera_params(cameraID, projMat, ref cameraFovX, ref cameraFovY,
        zFarPlane, zNearPlane);

    // Build the camera projection matrix from the 16 values returned by ALVAR.
    camProjMat = new Matrix(
        (float)projMat[0], (float)projMat[1], (float)projMat[2], (float)projMat[3],
        (float)projMat[4], (float)projMat[5], (float)projMat[6], (float)projMat[7],
        (float)projMat[8], (float)projMat[9], (float)projMat[10], (float)projMat[11],
        (float)projMat[12], (float)projMat[13], (float)projMat[14], (float)projMat[15]);

    detectorID = ALVARDllBridge.alvar_add_marker_detector(markerSize, markerRes, margin);

    initialized = true;
}