public void EnableTracking(MotionControllerModel mc)
{
    if (mc.Design)
    {
        return;
    }
    if (_camera.Handle == IntPtr.Zero)
    {
        StartTracker(PSMoveTrackerExposure.Low);
    }
    ConsoleService.Write(string.Format("[Tracker, {0}] Calibrating Motion Controller ({1}).", _camera.GUID, mc.Serial));

    // Convert the controller's normalized color channels (0..1) to bytes (0..255),
    // adding 0.5f before the cast so the value is rounded to nearest instead of truncated.
    byte r = (byte)((mc.Color.r * 255) + 0.5f);
    byte g = (byte)((mc.Color.g * 255) + 0.5f);
    byte b = (byte)((mc.Color.b * 255) + 0.5f);

    // Ask the tracker to calibrate and track the controller's sphere with that color.
    mc.TrackerStatus[_camera] = PsMoveApi.psmove_tracker_enable_with_color(_camera.Handle, mc.Handle, r, g, b);

    if (mc.TrackerStatus[_camera] == PSMoveTrackerStatus.Tracking ||
        mc.TrackerStatus[_camera] == PSMoveTrackerStatus.Calibrated)
    {
        // Grab a fresh camera image, run one tracking pass and re-read the status.
        PsMoveApi.psmove_tracker_update_image(_camera.Handle);
        PsMoveApi.psmove_tracker_update(_camera.Handle, mc.Handle);
        mc.TrackerStatus[_camera] = PsMoveApi.psmove_tracker_get_status(_camera.Handle, mc.Handle);

        // Enable the controller's orientation tracking and reset it to the reference pose.
        PsMoveApi.psmove_enable_orientation(mc.Handle, PSMoveBool.True);
        PsMoveApi.psmove_reset_orientation(mc.Handle);
    }

    //Matrix4x4 proj = new Matrix4x4();
    //for (int row = 0; row < 4; row++)
    //{
    //    for (int col = 0; col < 4; col++)
    //    {
    //        proj[row, col] = PsMoveApi.PSMoveMatrix4x4_at(PsMoveApi.psmove_fusion_get_projection_matrix(_camera.Fusion), row * 4 + col);
    //    }
    //}
    //mc.ProjectionMatrix[_camera] = proj;

    ConsoleService.Write(string.Format("[Tracker, {0}] Tracker Status of {1} = {2}",
        _camera.GUID, mc.Name, Enum.GetName(typeof(PSMoveTrackerStatus), mc.TrackerStatus[_camera])));
}
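// Illustrative sketch (not part of the original file): the inline color conversion in
// EnableTracking could be factored into a small helper. The name NormalizedToByte is
// hypothetical; the rounding trick (add 0.5f before the cast to byte) is the same one
// used above.
private static byte NormalizedToByte(float channel)
{
    // Scale a [0, 1] channel to [0, 255] and round to the nearest integer.
    return (byte)((channel * 255) + 0.5f);
}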
public void ProcessData(MotionControllerModel mc)
{
    if (_camera.Handle != IntPtr.Zero)
    {
        Vector3 rawPosition = Vector3.zero;
        Vector3 fusionPosition = Vector3.zero;
        PSMoveTrackerStatus trackerStatus = mc.TrackerStatus[_camera];
        if (!mc.Design)
        {
            trackerStatus = PsMoveApi.psmove_tracker_get_status(_camera.Handle, mc.Handle);
        }

        if (trackerStatus == PSMoveTrackerStatus.Tracking)
        {
            // Raw position: sphere center and radius in image coordinates.
            // Fusion position: controller position in the camera's 3D coordinate system.
            float rx, ry, rrad;
            float fx, fy, fz;
            PsMoveApi.psmove_tracker_get_position(_camera.Handle, mc.Handle, out rx, out ry, out rrad);
            PsMoveApi.psmove_fusion_get_position(_camera.Fusion, mc.Handle, out fx, out fy, out fz);

            // Round the image coordinates to whole pixels.
            rx = (int)(rx + 0.5);
            ry = (int)(ry + 0.5);
            rawPosition = new Vector3(rx, ry, rrad);
            fusionPosition = new Vector3(fx, fy, fz);
        }
        else if (mc.Design)
        {
            // Design-time dummy positions, one per camera index.
            switch (_camera.Calibration.Index)
            {
                case 0:
                    rawPosition = new Vector3(129, 280, 8.970074f);
                    break;
                case 1:
                    rawPosition = new Vector3(180, 293, 11.9714022f);
                    break;
                case 2:
                    rawPosition = new Vector3(528, 286, 9.038924f);
                    break;
                case 3:
                    rawPosition = new Vector3(389, 275, 6.530668f);
                    break;
            }
        }

        mc.TrackerStatus[_camera] = trackerStatus;
        mc.RawPosition[_camera] = rawPosition;
        mc.FusionPosition[_camera] = fusionPosition;

        if (trackerStatus == PSMoveTrackerStatus.Tracking || mc.Design)
        {
            // Controller position -> rectangle surrounding the sphere in image coordinates.
            PointF[] imgPts = CvHelper.GetImagePointsF(mc.RawPosition[_camera]);

            // Solve for the extrinsic parameters (rotation and translation) of the sphere
            // relative to the camera from the 2D-3D point correspondences.
            ExtrinsicCameraParameters ex = CameraCalibration.FindExtrinsicCameraParams2(
                _camera.Calibration.ObjectPoints2D,
                imgPts,
                _camera.Calibration.IntrinsicParameters);

            // Controller position in camera space as a homogeneous column vector.
            Matrix<double> coordinatesInCameraSpace_homo = new Matrix<double>(new double[]
            {
                ex.TranslationVector[0, 0],
                ex.TranslationVector[1, 0],
                ex.TranslationVector[2, 0],
                1
            });
            mc.CameraPosition[_camera] = new Vector3(
                (float)coordinatesInCameraSpace_homo[0, 0],
                (float)coordinatesInCameraSpace_homo[1, 0],
                (float)coordinatesInCameraSpace_homo[2, 0]);

            // Add the camera's calibrated rotation plus the manual correction angles to the
            // rotation vector (degrees -> radians).
            ex.RotationVector[0, 0] += (Math.PI / 180) * (_camera.Calibration.RotX + _camera.Calibration.XAngle);
            ex.RotationVector[1, 0] += (Math.PI / 180) * (_camera.Calibration.RotY + _camera.Calibration.YAngle);
            ex.RotationVector[2, 0] += (Math.PI / 180) * (_camera.Calibration.RotZ + _camera.Calibration.ZAngle);
            _camera.Calibration.ExtrinsicParameters[mc.Id] = ex;

            // Rotation matrices built from the camera's world rotation (and corrections).
            Matrix<double> minusRotation = CvHelper.Rotate(
                -_camera.Calibration.RotX - _camera.Calibration.XAngle,
                -_camera.Calibration.RotY - _camera.Calibration.YAngle,
                -_camera.Calibration.RotZ - _camera.Calibration.ZAngle);

            Matrix<double> R3x3_cameraToWorld = CvHelper.Rotate(
                _camera.Calibration.RotX,
                _camera.Calibration.RotY + _camera.Calibration.YAngle,
                _camera.Calibration.RotZ);

            Matrix<double> rotInv = new Matrix<double>(3, 3);
            CvInvoke.cvInvert(ex.RotationVector.RotationMatrix.Ptr, rotInv, SOLVE_METHOD.CV_LU);

            // (Not used further in this method.)
            Matrix<double> test = CvHelper.ConvertToHomogenous(-1 * R3x3_cameraToWorld);

            // Re-project the calibration object points with the updated extrinsics.
            _camera.Calibration.ObjectPointsProjected = CameraCalibration.ProjectPoints(
                _camera.Calibration.ObjectPoints3D,
                _camera.Calibration.ExtrinsicParameters[mc.Id],
                _camera.Calibration.IntrinsicParameters);

            // 4x4 translation matrix moving the camera to its position in world space.
            Matrix<double> cameraPositionInWorldSpace4x4 = new Matrix<double>(new double[,]
            {
                { 1, 0, 0, _camera.Calibration.TranslationToWorld[0, 0] },
                { 0, 1, 0, _camera.Calibration.TranslationToWorld[1, 0] },
                { 0, 0, 1, _camera.Calibration.TranslationToWorld[2, 0] },
                { 0, 0, 0, 1 },
            });

            // Rotate the camera-space position into world orientation, then translate it
            // by the camera's position in world space.
            Matrix<double> Rt_homo = CvHelper.ConvertToHomogenous(R3x3_cameraToWorld);
            Matrix<double> x_world_homo = CvHelper.ConvertToHomogenous(minusRotation) * coordinatesInCameraSpace_homo;
            Rt_homo[0, 3] = x_world_homo[0, 0];
            Rt_homo[1, 3] = x_world_homo[1, 0];
            Rt_homo[2, 3] = x_world_homo[2, 0];
            x_world_homo = cameraPositionInWorldSpace4x4 * x_world_homo;

            Vector3 v3world = new Vector3(
                (float)x_world_homo[0, 0],
                (float)x_world_homo[1, 0],
                (float)x_world_homo[2, 0]);
            mc.WorldPosition[_camera] = v3world;

            // Shift the position history by one and store the newest world position at index 0.
            for (int i = mc.PositionHistory[_camera].Length - 1; i > 0; --i)
            {
                mc.PositionHistory[_camera][i] = mc.PositionHistory[_camera][i - 1];
            }
            mc.PositionHistory[_camera][0] = v3world;
        }
    }
} // ProcessData
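// Illustrative sketch (not part of the original file): the world-space computation in
// ProcessData reduces to rotating the camera-space position by the inverse of the camera's
// world rotation and then translating by the camera's position in world space, i.e.
// x_world = T * R(-rx, -ry, -rz) * x_camera in homogeneous coordinates. The helper below is
// hypothetical (name and parameters are mine); it assumes CvHelper.Rotate takes the same
// degree-valued angles used above and that CvHelper.ConvertToHomogenous pads a 3x3 rotation
// into a 4x4 matrix with zero translation, as the surrounding code implies.
private static Vector3 CameraToWorld(
    Matrix<double> xCameraHomo,        // 4x1 homogeneous position in camera space
    double rotXDeg, double rotYDeg, double rotZDeg, // camera rotation in world space (degrees)
    Matrix<double> translationToWorld) // 3x1 camera position in world space
{
    // Rotate the camera-space position into world orientation.
    Matrix<double> rot = CvHelper.Rotate(-rotXDeg, -rotYDeg, -rotZDeg);
    Matrix<double> xWorld = CvHelper.ConvertToHomogenous(rot) * xCameraHomo;

    // Translate by the camera's world position (w stays 1, so adding the components is
    // equivalent to multiplying by the 4x4 translation matrix used above).
    return new Vector3(
        (float)(xWorld[0, 0] + translationToWorld[0, 0]),
        (float)(xWorld[1, 0] + translationToWorld[1, 0]),
        (float)(xWorld[2, 0] + translationToWorld[2, 0]));
}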