Example #1
 private extern static void transformENU2Geodetic(IntPtr datum, ref MCvPoint3D64f enu, ref GeodeticCoordinate refCoor, ref MCvPoint3D64f refEcef, ref GeodeticCoordinate coor);
Example #2
 /// <summary>
 /// Constructs a WArrow.
 /// </summary>
 /// <param name="pt1">Start point of the arrow.</param>
 /// <param name="pt2">End point of the arrow.</param>
 /// <param name="thickness">Thickness of the arrow. Thickness of arrow head is also adjusted accordingly.</param>
 /// <param name="color">Color of the arrow.</param>
 public WArrow(MCvPoint3D64f pt1, MCvPoint3D64f pt2, double thickness, MCvScalar color)
 {
     _ptr = CvInvoke.cveWArrowCreate(ref pt1, ref pt2, thickness, ref color, ref _widget3dPtr, ref _widgetPtr);
 }
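A minimal usage sketch for the constructor above: build the arrow and hand it to a 3D visualizer window. The Viz3d members used here (constructor taking a window name, ShowWidget, Spin) are assumed to mirror cv::viz::Viz3d in Emgu CV; the namespaces and widget id are likewise assumptions, so adjust them to your Emgu version.

 using Emgu.CV;           // assumed namespace for Viz3d / WArrow
 using Emgu.CV.Structure; // MCvPoint3D64f, MCvScalar

 // Hypothetical window and widget names; Viz3d API assumed to mirror cv::viz::Viz3d.
 MCvPoint3D64f start = new MCvPoint3D64f(0, 0, 0);
 MCvPoint3D64f end   = new MCvPoint3D64f(1, 1, 1);
 WArrow arrow = new WArrow(start, end, 0.01, new MCvScalar(0, 0, 255)); // (0,0,255) = red in BGR order

 using (Viz3d window = new Viz3d("arrow demo"))
 {
     window.ShowWidget("arrow", arrow);
     window.Spin(); // blocks until the window is closed
 }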
Example #3
        /// <summary>
        /// Compute the minimum and maximum value from the points
        /// </summary>
        /// <param name="points">The points</param>
        /// <param name="min">The minimum x,y,z values</param>
        /// <param name="max">The maximum x,y,z values</param>
        public static void GetMinMax(IEnumerable <MCvPoint3D64f> points, out MCvPoint3D64f min, out MCvPoint3D64f max)
        {
            min   = new MCvPoint3D64f();
            min.X = min.Y = min.Z = double.MaxValue;
            max   = new MCvPoint3D64f();
            max.X = max.Y = max.Z = double.MinValue;

            foreach (MCvPoint3D64f p in points)
            {
                min.X = Math.Min(min.X, p.X);
                min.Y = Math.Min(min.Y, p.Y);
                min.Z = Math.Min(min.Z, p.Z);
                max.X = Math.Max(max.X, p.X);
                max.Y = Math.Max(max.Y, p.Y);
                max.Z = Math.Max(max.Z, p.Z);
            }
        }
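A short, hypothetical caller for the helper above, showing the out-parameter pattern and the resulting axis-aligned bounds:

 MCvPoint3D64f[] points =
 {
     new MCvPoint3D64f(1.0, -2.0, 3.0),
     new MCvPoint3D64f(-4.0, 5.0, 0.5),
     new MCvPoint3D64f(2.5, 0.0, -1.0)
 };

 MCvPoint3D64f min, max;
 GetMinMax(points, out min, out max);
 // min = (-4.0, -2.0, -1.0), max = (2.5, 5.0, 3.0)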
Example #4
 internal static extern IntPtr cveWCircleCreate(double radius, ref MCvPoint3D64f center, ref MCvPoint3D64f normal, double thickness, ref MCvScalar color, ref IntPtr widget3d, ref IntPtr widget);
Example #5
 internal extern static void quaternionsToAxisAngle(ref Quaternions q, ref MCvPoint3D64f axisAngle);
 private extern static void transformNED2Geodetic(ref MCvPoint3D64f ned, ref GeodeticCoordinate refCoor, ref MCvPoint3D64f refEcef, ref GeodeticCoordinate coor);
Example #7
 internal static extern IntPtr cveWCylinderCreate(ref MCvPoint3D64f axisPoint1, ref MCvPoint3D64f axisPoint2, double radius, int numsides, ref MCvScalar color, ref IntPtr widget3d, ref IntPtr widget);
Example #8
 /// <summary>
 /// Constructs a WCube.
 /// </summary>
 /// <param name="minPoint">Specifies minimum point of the bounding box.</param>
 /// <param name="maxPoint">Specifies maximum point of the bounding box.</param>
 /// <param name="wireFrame">If true, cube is represented as wireframe.</param>
 /// <param name="color">Color of the cube.</param>
 public WCube(MCvPoint3D64f minPoint, MCvPoint3D64f maxPoint, bool wireFrame, MCvScalar color)
 {
     _ptr = CvInvoke.cveWCubeCreate(ref minPoint, ref maxPoint, wireFrame, ref color, ref _widget3dPtr, ref _widgetPtr);
 }
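A hedged sketch combining this constructor with the GetMinMax helper from Example #3: compute the bounds of a point set and display them as a wireframe box. Here 'points' and 'window' are placeholders (any IEnumerable of MCvPoint3D64f and an existing Viz3d instance, as assumed in the WArrow sketch above).

 MCvPoint3D64f min, max;
 GetMinMax(points, out min, out max);   // points: any IEnumerable<MCvPoint3D64f>

 // White wireframe box enclosing the point set.
 WCube bounds = new WCube(min, max, true, new MCvScalar(255, 255, 255));
 window.ShowWidget("bounds", bounds);   // window: an existing Viz3d instance (assumed API)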
Example #9
 internal static extern IntPtr cveWCubeCreate(
     ref MCvPoint3D64f minPoint, ref MCvPoint3D64f maxPoint,
     [MarshalAs(CvInvoke.BoolMarshalType)]
     bool wireFrame, ref MCvScalar color,
     ref IntPtr widget3d, ref IntPtr widget);
Example #10
 /// <summary>
 /// Constructs repositioned planar cone.
 /// </summary>
 /// <param name="radius">Radius of the cone.</param>
 /// <param name="center">Center of the cone base.</param>
 /// <param name="tip">Tip of the cone.</param>
 /// <param name="resolution">Resolution of the cone.</param>
 /// <param name="color">Color of the cone.</param>
 public WCone(double radius, MCvPoint3D64f center, MCvPoint3D64f tip, int resolution, MCvScalar color)
 {
     _ptr = CvInvoke.cveWConeCreate(radius, ref center, ref tip, resolution, ref color, ref _widget3dPtr, ref _widgetPtr);
 }
Example #11
 internal static extern IntPtr cveWConeCreate(double radius, ref MCvPoint3D64f center, ref MCvPoint3D64f tip, int resolution, ref MCvScalar color, ref IntPtr widget3d, ref IntPtr widget);
Example #12
        /// <summary>
        /// Compute the minimum and maximum value from the points
        /// </summary>
        /// <param name="points">The points</param>
        /// <param name="min">The minimum x,y,z values</param>
        /// <param name="max">The maximum x,y,z values</param>
        public static void GetMinMax(IEnumerable <MCvPoint3D64f> points, out MCvPoint3D64f min, out MCvPoint3D64f max)
        {
            min   = new MCvPoint3D64f();
            min.x = min.y = min.z = double.MaxValue;
            max   = new MCvPoint3D64f();
            max.x = max.y = max.z = double.MinValue;

            foreach (MCvPoint3D64f p in points)
            {
                min.x = Math.Min(min.x, p.x);
                min.y = Math.Min(min.y, p.y);
                min.z = Math.Min(min.z, p.z);
                max.x = Math.Max(max.x, p.x);
                max.y = Math.Max(max.y, p.y);
                max.z = Math.Max(max.z, p.z);
            }
        }
        public void DoBundleAdjust()
        {
            // N = cameras
            // M = point count
            //public static void BundleAdjust(MCvPoint3D64f[M] points,              // Positions of points in global coordinate system (input and output), values will be modified by bundle adjustment
            //                                MCvPoint2D64f[M][N] imagePoints,      // Projections of 3d points for every camera
            //                                int[M][N] visibility,                 // Visibility of 3d points for every camera
            //                                Matrix<double>[N] cameraMatrix,       // Intrinsic matrices of all cameras (input and output), values will be modified by bundle adjustment
            //                                Matrix<double>[N] R,                  // rotation matrices of all cameras (input and output), values will be modified by bundle adjustment
            //                                Matrix<double>[N] T,                  // translation vector of all cameras (input and output), values will be modified by bundle adjustment
            //                                Matrix<double>[N] distCoefficients,   // distortion coefficients of all cameras (input and output), values will be modified by bundle adjustment
            //                                MCvTermCriteria termCrit)             // Termination criteria, a reasonable value will be (30, 1.0e-12)
            _stopwatchGet.Restart();
            if (_cameras.Cameras.Count == 0)
            {
                return;
            }

            IEnumerable <CameraModel> orderedCameras = _cameras.Cameras.OrderBy(camera => camera.Calibration.Index);
            ObservableCollection <MotionControllerModel> controllers = _cameras.Cameras[0].Controllers;

            if (controllers.Count == 0)
            {
                return;
            }

            float radius      = CameraCalibrationModel.SPHERE_RADIUS_CM;
            int   cameraCount = _cameras.Cameras.Count;
            int   pointCount  = 8;

            MCvPoint3D64f[]   objectPoints     = new MCvPoint3D64f[controllers.Count * pointCount];
            MCvPoint2D64f[][] imagePoints      = new MCvPoint2D64f[cameraCount][];
            int[][]           visibility       = new int[cameraCount][];
            Matrix <double>[] cameraMatrix     = new Matrix <double> [cameraCount];
            Matrix <double>[] R                = new Matrix <double> [cameraCount];
            Matrix <double>[] T                = new Matrix <double> [cameraCount];
            Matrix <double>[] distCoefficients = new Matrix <double> [cameraCount];
            MCvTermCriteria   termCrit         = new MCvTermCriteria(30, 1.0e-12);

            int visible = 0;

            foreach (CameraModel camera in orderedCameras)
            {
                visibility[camera.Calibration.Index]       = new int[controllers.Count * pointCount];
                cameraMatrix[camera.Calibration.Index]     = camera.Calibration.IntrinsicParameters.IntrinsicMatrix.Clone();
                distCoefficients[camera.Calibration.Index] = camera.Calibration.IntrinsicParameters.DistortionCoeffs.Clone();
                imagePoints[camera.Calibration.Index]      = new MCvPoint2D64f[controllers.Count * pointCount];
                R[camera.Calibration.Index] = camera.Calibration.RotationToWorld.Clone();
                T[camera.Calibration.Index] = camera.Calibration.TranslationToWorld.Clone();

                foreach (MotionControllerModel controller in controllers)
                {
                    float x = controller.RawPosition[camera].x;
                    float y = controller.RawPosition[camera].y;

                    //if (x == 0 && y == 0) return;

                    // controller is not visible
                    if (controller.TrackerStatus[camera] != PSMoveTrackerStatus.Tracking)
                    {
                        for (int i = 0; i < pointCount; i++)
                        {
                            visibility[camera.Calibration.Index][i + controller.Id * pointCount] = 0;
                        }
                    }
                    // controller is visible
                    else
                    {
                        Vector3[] history      = controller.PositionHistory[camera];
                        float     avgMagnitude = 0f;
                        for (int i = 1; i < history.Length; i++)
                        {
                            avgMagnitude += history[i].magnitude / (history.Length - 1);
                        }
                        // check deviation of newest position
                        if (Math.Abs((history[0].magnitude * 100) / avgMagnitude - 100) > 5)
                        {
                            for (int i = 0; i < pointCount; i++)
                            {
                                visibility[camera.Calibration.Index][i + controller.Id * pointCount] = 0;
                            }
                            continue;
                        }
                        visible++;
                        //double distance = 0.0;
                        int startIndex = controller.Id * pointCount;

                        //MCvPoint3D64f cameraPositionInWorld = new MCvPoint3D64f
                        //{
                        //    x = camera.Calibration.TranslationToWorld[0, 0],
                        //    y = camera.Calibration.TranslationToWorld[1, 0],
                        //    z = camera.Calibration.TranslationToWorld[2, 0]
                        //};

                        // set visibility and calculate distance of the controller relative to the camera
                        for (int i = startIndex; i < pointCount * controllers.Count; i++)
                        {
                            visibility[camera.Calibration.Index][i] = 1;
                            //double d = CvHelper.GetDistanceToPoint(cameraPositionInWorld,objectPoints[i]);
                            //distance += d / pointCount;
                        }

                        // initialize object's world coordinates
                        // calculate as the average of each camera's transformed world coordinate
                        float wx = controller.WorldPosition[camera].x;
                        float wy = controller.WorldPosition[camera].y;
                        float wz = controller.WorldPosition[camera].z;


                        objectPoints[startIndex]     += new MCvPoint3D32f(wx - radius, wy - radius, wz - radius);
                        objectPoints[startIndex + 1] += new MCvPoint3D32f(wx + radius, wy - radius, wz - radius);
                        objectPoints[startIndex + 2] += new MCvPoint3D32f(wx + radius, wy + radius, wz - radius);
                        objectPoints[startIndex + 3] += new MCvPoint3D32f(wx - radius, wy + radius, wz - radius);

                        objectPoints[startIndex + 4] += new MCvPoint3D32f(wx - radius, wy + radius, wz + radius);
                        objectPoints[startIndex + 5] += new MCvPoint3D32f(wx + radius, wy + radius, wz + radius);
                        objectPoints[startIndex + 6] += new MCvPoint3D32f(wx + radius, wy - radius, wz + radius);
                        objectPoints[startIndex + 7] += new MCvPoint3D32f(wx - radius, wy - radius, wz + radius);

                        //imagePoints[scvm.Camera.Calibration.Index] = Utils.GetImagePoints(mcvm.MotionController.RawPosition[scvm.Camera]);
                        imagePoints[camera.Calibration.Index] = Array.ConvertAll(camera.Calibration.ObjectPointsProjected, CvHelper.PointFtoPoint2D);
                    }
                } // foreach controller
            }     // foreach camera

            if (visible == 0)
            {
                return;
            }

            // average object points
            for (int i = 0; i < objectPoints.Length; i++)
            {
                objectPoints[i].x /= visible;
                objectPoints[i].y /= visible;
                objectPoints[i].z /= visible;
            }
            // calculate object's middle
            float prex = 0, prey = 0, prez = 0;

            for (int i = 0; i < objectPoints.Length; i++)
            {
                prex += (float)objectPoints[i].x / objectPoints.Length;
                prey += (float)objectPoints[i].y / objectPoints.Length;
                prez += (float)objectPoints[i].z / objectPoints.Length;
            }
            _stopwatchBA.Restart();
            //LevMarqSparse.BundleAdjust(objectPoints, imagePoints, visibility, cameraMatrix, R, T, distCoefficients, termCrit);
            _stopwatchBA.Stop();
            _stopwatchSet.Restart();

            // check for calculation errors (NaN results)
            for (int i = 0; i < objectPoints.Length; i++)
            {
                if (double.IsNaN(objectPoints[i].x) || double.IsNaN(objectPoints[i].y) || double.IsNaN(objectPoints[i].z))
                {
                    return;
                }
            }

            // save changed matrices
            foreach (CameraModel camera in orderedCameras)
            {
                if (visibility[camera.Calibration.Index][0] == 1)
                {
                    //RotationVector3D rot1 = new RotationVector3D();
                    //rot1.RotationMatrix = camera.Calibration.RotationToWorld;

                    //RotationVector3D rot2 = new RotationVector3D();
                    //rot2.RotationMatrix = R[camera.Calibration.Index];

                    //Console.WriteLine((int)(rot1[0, 0] * (180 / Math.PI)) + " " + (int)(rot2[0, 0] * (180 / Math.PI)));
                    //Console.WriteLine((int)(rot1[1, 0] * (180 / Math.PI)) + " " + (int)(rot2[1, 0] * (180 / Math.PI)));
                    //Console.WriteLine((int)(rot1[2, 0] * (180 / Math.PI)) + " " + (int)(rot2[2, 0] * (180 / Math.PI)) + Environment.NewLine);

                    //camera.Calibration.IntrinsicParameters.IntrinsicMatrix = cameraMatrix[camera.Calibration.Index];
                    //camera.Calibration.RotationToWorld = R[camera.Calibration.Index];
                    //camera.Calibration.TranslationToWorld = T[camera.Calibration.Index];
                    //camera.Calibration.IntrinsicParameters.DistortionCoeffs = distCoefficients[camera.Calibration.Index];

                    //camera.Calibration.XAngle = (int)(rot2[0, 0] * (180 / Math.PI));
                    //camera.Calibration.YAngle = (int)(rot2[1, 0] * (180 / Math.PI));
                    //camera.Calibration.ZAngle = (int)(rot2[2, 0] * (180 / Math.PI));
                }
            }

            // calculate object's middle
            float preCenterX = 0, preCenterY = 0, preCenterZ = 0;

            for (int i = 0; i < objectPoints.Length; i++)
            {
                preCenterX += (float)objectPoints[i].x / objectPoints.Length;
                preCenterY += (float)objectPoints[i].y / objectPoints.Length;
                preCenterZ += (float)objectPoints[i].z / objectPoints.Length;
            }
            Vector3 prePosition = new Vector3(preCenterX, -preCenterY, preCenterZ);

            if (prePosition != _positionHistory[0])
            {
                for (int i = _positionHistory.Length - 1; i > 0; --i)
                {
                    _positionHistory[i] = _positionHistory[i - 1];
                }
                _positionHistory[0] = prePosition;
            }

            //Vector3 avgPosition = Vector3.zero;
            //for (int i = 0; i < _positionHistory.Length; i++)
            //{
            //    avgPosition += _positionHistory[i] / _positionHistory.Length;
            //}

            // row 0: prediction, row 1: correction / estimated

            Matrix <float> kalmanResults = FilterPoints(_kalmanXYZ, prePosition.x, prePosition.y, prePosition.z);

            Vector3 kalmanPosition = new Vector3(kalmanResults[1, 0], kalmanResults[1, 1], kalmanResults[1, 2]);

            _cameras.Position = kalmanPosition;

            _stopwatchGet.Stop();
            _stopwatchSet.Stop();
            for (int i = 0; i < 4; i++)
            {
                CameraModel camera = _cameras.Cameras[i];
                float       xr     = controllers[0].RawPosition[camera].x;
                float       yr     = controllers[0].RawPosition[camera].y;
                float       zr     = controllers[0].RawPosition[camera].z;
                float       xc     = controllers[0].CameraPosition[camera].x;
                float       yc     = controllers[0].CameraPosition[camera].y;
                float       zc     = controllers[0].CameraPosition[camera].z;
                string      str    = String.Format(new CultureInfo("en-US"), "{0},{1},{2},{3},{4},{5},{6},{7},{8}",
                                                   iteration,
                                                   xr,
                                                   yr,
                                                   zr,
                                                   PsMoveApi.psmove_tracker_distance_from_radius(camera.Handle, controllers[0].RawPosition[camera].z),
                                                   xc,
                                                   yc,
                                                   zc,
                                                   Math.Sqrt(xc * xc + yc * yc + zc * zc)
                                                   );
                if (camera.Calibration.Index == 0)
                {
                    if (csv0.Count == 0 || !csv0[csv0.Count - 1].Contains(zr.ToString(new CultureInfo("en-US"))))
                    {
                        csv0.Add(str);
                    }
                }
                else if (camera.Calibration.Index == 1)
                {
                    if (csv1.Count == 0 || !csv1[csv1.Count - 1].Contains(zr.ToString(new CultureInfo("en-US"))))
                    {
                        csv1.Add(str);
                    }
                }
                else if (camera.Calibration.Index == 2)
                {
                    if (csv2.Count == 0 || !csv2[csv2.Count - 1].Contains(zr.ToString(new CultureInfo("en-US"))))
                    {
                        csv2.Add(str);
                    }
                }
                else if (camera.Calibration.Index == 3)
                {
                    if (csv3.Count == 0 || !csv3[csv3.Count - 1].Contains(zr.ToString(new CultureInfo("en-US"))))
                    {
                        csv3.Add(str);
                    }
                }
            }
            csvTime.Add(String.Format(new CultureInfo("en-US"), "{0},{1},{2},{3}",
                                      iteration,
                                      _stopwatchGet.ElapsedMilliseconds,
                                      _stopwatchBA.ElapsedMilliseconds,
                                      _stopwatchSet.ElapsedMilliseconds));
            string strBA = String.Format(new CultureInfo("en-US"), "{0},{1},{2},{3},{4},{5},{6},{7},{8}",
                                         iteration,
                                         prePosition.x,
                                         prePosition.y,
                                         prePosition.z,
                                         Math.Sqrt(prePosition.x * prePosition.x + prePosition.y * prePosition.y + prePosition.z * prePosition.z),
                                         kalmanPosition.x,
                                         kalmanPosition.y,
                                         kalmanPosition.z,
                                         Math.Sqrt(kalmanPosition.x * kalmanPosition.x + kalmanPosition.y * kalmanPosition.y + kalmanPosition.z * kalmanPosition.z));

            if (csvBA.Count == 0 || !csvBA[csvBA.Count - 1].Contains(prePosition.x.ToString(new CultureInfo("en-US"))))
            {
                csvBA.Add(strBA);
            }
            iteration++;
            if (csvBA.Count == 100)
            {
                File.WriteAllLines(@"C:\\Users\\Johannes\\Documents\\GitHub\\Thesis\\Source\\avg_time.csv", csvTime);
                File.WriteAllLines(@"C:\\Users\\Johannes\\Documents\\GitHub\\Thesis\\Source\\distance.csv", csvBA);
                File.WriteAllLines(@"C:\\Users\\Johannes\\Documents\\GitHub\\Thesis\\Source\\distance0.csv", csv0);
                File.WriteAllLines(@"C:\\Users\\Johannes\\Documents\\GitHub\\Thesis\\Source\\distance1.csv", csv1);
                File.WriteAllLines(@"C:\\Users\\Johannes\\Documents\\GitHub\\Thesis\\Source\\distance2.csv", csv2);
                File.WriteAllLines(@"C:\\Users\\Johannes\\Documents\\GitHub\\Thesis\\Source\\distance3.csv", csv3);
            }
        }
Example #14
 private extern static void transformGeodetic2NED(IntPtr datum, ref GeodeticCoordinate coor, ref GeodeticCoordinate refCoor, ref MCvPoint3D64f refEcef, ref MCvPoint3D64f ned);
        /// <summary>
        /// Convert <paramref name="ned"/> to Geodetic coordinate using the reference coordinate <paramref name="refCoor"/>
        /// </summary>
        /// <param name="ned">The NED (North East Down) coordinate to be converted</param>
        /// <param name="refCoor">The reference Geodetic coordinate</param>
        /// <param name="refEcef"><paramref name="refCoor"/> in ECEF format. If this is provided, it speeds up the computation</param>
        /// <returns>The Geodetic coordinate</returns>
        public static GeodeticCoordinate NED2Geodetic(MCvPoint3D64f ned, GeodeticCoordinate refCoor, MCvPoint3D64f refEcef)
        {
#if PINVOKE
            GeodeticCoordinate coor = new GeodeticCoordinate();
            transformNED2Geodetic(ref ned, ref refCoor, ref refEcef, ref coor);
            return(coor);
#else
            MCvPoint3D64f enu = new MCvPoint3D64f(ned.y, ned.x, -ned.z);
            return(ENU2Geodetic(enu, refCoor, refEcef));
#endif
        }
        /// <summary>
        /// Convert <paramref name="coor"/> to ENU (East North UP) coordinate using the reference coordinate <paramref name="refCoor"/>
        /// </summary>
        /// <param name="coor">The Geodetic Coordinate to be converted</param>
        /// <param name="refCoor">The reference Geodetic coordinate</param>
        /// <param name="refEcef"><paramref name="refCoor"/> in ECEF format. If this is provided, it speeds up the computation</param>
        /// <returns>The ENU (East North UP) coordinate related to the reference coordinate</returns>
        public static MCvPoint3D64f Geodetic2ENU(GeodeticCoordinate coor, GeodeticCoordinate refCoor, MCvPoint3D64f refEcef)
        {
#if PINVOKE
            MCvPoint3D64f p = new MCvPoint3D64f();
            transformGeodetic2ENU(ref coor, ref refCoor, ref refEcef, ref p);
            return(p);
#else
            MCvPoint3D64f delta     = Geodetic2ECEF(coor) - refEcef;
            double        sinPhi    = Math.Sin(refCoor.Latitude);
            double        cosPhi    = Math.Cos(refCoor.Latitude);
            double        sinLambda = Math.Sin(refCoor.Longitude);
            double        cosLambda = Math.Cos(refCoor.Longitude);

            double cosLambda_DeltaX = cosLambda * delta.x;
            double sinLambda_DeltaY = sinLambda * delta.y;

            return(new MCvPoint3D64f(
                       -sinLambda * delta.x + cosLambda * delta.y,
                       -sinPhi * cosLambda_DeltaX - sinPhi * sinLambda_DeltaY + cosPhi * delta.z,
                       cosPhi * cosLambda_DeltaX + cosPhi * sinLambda_DeltaY + sinPhi * delta.z));
#endif
        }
 /// <summary>
 /// Convert <paramref name="ned"/> to Geodetic coordinate using the reference coordinate <paramref name="refCoor"/>
 /// </summary>
 /// <param name="ned">The NED (North East Down) coordinate to be converted</param>
 /// <param name="refCoor">The reference Geodetic coordinate</param>
 /// <returns>The Geodetic coordinate</returns>
 public static GeodeticCoordinate NED2Geodetic(MCvPoint3D64f ned, GeodeticCoordinate refCoor)
 {
     return(NED2Geodetic(ned, refCoor, Geodetic2ECEF(refCoor)));
 }
 private extern static void transformGeodetic2ECEF(ref GeodeticCoordinate coordinate, ref MCvPoint3D64f ecef);
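A hedged usage sketch for the conversions above. The static methods are shown without their containing class, so GeodeticTransform below is a placeholder type name; the GeodeticCoordinate object initializer and its Altitude member are likewise assumptions. Latitude and Longitude are in radians, since the conversion code calls Math.Sin/Math.Cos on them directly.

 using System;
 using Emgu.CV.Structure;

 // Placeholder reference coordinate (Altitude member name is assumed).
 GeodeticCoordinate reference = new GeodeticCoordinate
 {
     Latitude  = 47.0 * (Math.PI / 180.0),
     Longitude = 11.0 * (Math.PI / 180.0),
     Altitude  = 500.0
 };

 // A point 100 m north of and 20 m above the reference, in NED (North East Down).
 MCvPoint3D64f ned = new MCvPoint3D64f(100, 0, -20);

 // Cache the reference in ECEF when converting many points against the same reference ...
 MCvPoint3D64f refEcef = GeodeticTransform.Geodetic2ECEF(reference);
 GeodeticCoordinate a = GeodeticTransform.NED2Geodetic(ned, reference, refEcef);

 // ... or let the two-argument overload compute it on every call.
 GeodeticCoordinate b = GeodeticTransform.NED2Geodetic(ned, reference);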
Example #19
 /// <summary>
 /// Constructs a WCylinder.
 /// </summary>
 /// <param name="axisPoint1">A point1 on the axis of the cylinder.</param>
 /// <param name="axisPoint2">A point2 on the axis of the cylinder.</param>
 /// <param name="radius">Radius of the cylinder.</param>
 /// <param name="numsides">Resolution of the cylinder.</param>
 /// <param name="color">Color of the cylinder.</param>
 public WCylinder(MCvPoint3D64f axisPoint1, MCvPoint3D64f axisPoint2, double radius, int numsides, MCvScalar color)
 {
     _ptr = CvInvoke.cveWCylinderCreate(ref axisPoint1, ref axisPoint2, radius, numsides, ref color, ref _widget3dPtr, ref _widgetPtr);
 }
        /// <summary>
        /// Convert <paramref name="coor"/> to NED (North East Down) coordinate using the reference coordinate <paramref name="refCoor"/>
        /// </summary>
        /// <param name="coor">The Geodetic Coordinate to be converted</param>
        /// <param name="refCoor">The reference Geodetic coordinate</param>
        /// <param name="refEcef"><paramref name="refCoor"/> in ECEF format. If this is provided, it speeds up the computation</param>
        /// <returns>The NED (North East Down) coordinate related to the reference coordinate</returns>
        public static MCvPoint3D64f Geodetic2NED(GeodeticCoordinate coor, GeodeticCoordinate refCoor, MCvPoint3D64f refEcef)
        {
#if PINVOKE
            MCvPoint3D64f ned = new MCvPoint3D64f();
            transformGeodetic2NED(ref coor, ref refCoor, ref refEcef, ref ned);
            return(ned);
#else
            MCvPoint3D64f enu = Geodetic2ENU(coor, refCoor, refEcef);
            return(new MCvPoint3D64f(enu.y, enu.x, -enu.z));
#endif
        }
Example #21
 /// <summary>
 /// Constructs repositioned planar circle.
 /// </summary>
 /// <param name="radius">Radius of the circle.</param>
 /// <param name="center">Center of the circle.</param>
 /// <param name="normal">Normal of the plane in which the circle lies.</param>
 /// <param name="thickness">Thickness of the circle.</param>
 /// <param name="color">Color of the circle.</param>
 public WCircle(double radius, MCvPoint3D64f center, MCvPoint3D64f normal, double thickness, MCvScalar color)
 {
     _ptr = CvInvoke.cveWCircleCreate(radius, ref center, ref normal, thickness, ref color, ref _widget3dPtr, ref _widgetPtr);
 }
 /// <summary>
 /// Convert <paramref name="enu"/> to Geodetic coordinate using the reference coordinate <paramref name="refCoor"/>
 /// </summary>
 /// <param name="enu">The ENU (East North UP) coordinate to be converted</param>
 /// <param name="refCoor">The reference Geodetic coordinate</param>
 /// <returns>The Geodetic coordinate</returns>
 public static GeodeticCoordinate ENU2Geodetic(MCvPoint3D64f enu, GeodeticCoordinate refCoor)
 {
     return(ENU2Geodetic(enu, refCoor, Geodetic2ECEF(refCoor)));
 }
Example #23
        private void ProcessFrameFindFaces()
        {
            if (Options.StereoCalibrationOptions == null)
            {
                return;
            }

            var leftImageR  = new Image <Gray, byte>(new Size(_cameras[0].Image.Width, _cameras[0].Image.Height));
            var rightImageR = new Image <Gray, byte>(new Size(_cameras[1].Image.Width, _cameras[1].Image.Height));

            CvInvoke.cvRemap(_cameras[0].Image.Ptr, leftImageR.Ptr,
                             Options.StereoCalibrationOptions.MapXLeft, Options.StereoCalibrationOptions.MapYLeft, 0, new MCvScalar(0));

            CvInvoke.cvRemap(_cameras[1].Image.Ptr, rightImageR.Ptr,
                             Options.StereoCalibrationOptions.MapXRight, Options.StereoCalibrationOptions.MapYRight, 0, new MCvScalar(0));

            //PointCollection.ReprojectImageTo3D()

            // find first face points
            var leftFaceRegions  = Helper2D.GetFaceRegion2Ds(leftImageR, FaceWidth, FaceHeight, true, true);
            var rightFaceRegions = Helper2D.GetFaceRegion2Ds(rightImageR, FaceWidth, FaceHeight, true, true);

            FaceRegion2D leftFace;
            FaceRegion2D rightFace;

            if (leftFaceRegions != null &&
                rightFaceRegions != null &&
                (leftFace = leftFaceRegions.FirstOrDefault()) != null &&
                (rightFace = rightFaceRegions.FirstOrDefault()) != null)
            {
                var leftPoints  = new List <Point>();
                var rightPoints = new List <Point>();

                #region Points

                // face
                leftPoints.Add(new Point(leftFace.Face.Location.X + leftFace.Face.Width / 2, leftFace.Face.Location.Y + leftFace.Face.Height / 2));
                rightPoints.Add(new Point(rightFace.Face.Location.X + rightFace.Face.Width / 2, rightFace.Face.Location.Y + rightFace.Face.Height / 2));

                // left eye
                if (leftFace.LeftEye != null && rightFace.LeftEye != null)
                {
                    leftPoints.Add(new Point(leftFace.Face.Location.X + leftFace.LeftEye.Location.X + leftFace.LeftEye.Width / 2,
                                             leftFace.Face.Location.Y + leftFace.LeftEye.Location.Y + leftFace.LeftEye.Height / 2));

                    rightPoints.Add(new Point(rightFace.Face.Location.X + rightFace.LeftEye.Location.X + rightFace.LeftEye.Width / 2,
                                              rightFace.Face.Location.Y + rightFace.LeftEye.Location.Y + rightFace.LeftEye.Height / 2));
                }

                // right eye
                if (leftFace.RightEye != null && rightFace.RightEye != null)
                {
                    leftPoints.Add(new Point(leftFace.Face.Location.X + leftFace.RightEye.Location.X + leftFace.RightEye.Width / 2,
                                             leftFace.Face.Location.Y + leftFace.RightEye.Location.Y + leftFace.RightEye.Height / 2));

                    rightPoints.Add(new Point(rightFace.Face.Location.X + rightFace.RightEye.Location.X + rightFace.RightEye.Width / 2,
                                              rightFace.Face.Location.Y + rightFace.RightEye.Location.Y + rightFace.RightEye.Height / 2));
                }

                // mouth
                if (leftFace.Mouth != null && rightFace.Mouth != null)
                {
                    leftPoints.Add(new Point(leftFace.Face.Location.X + leftFace.Mouth.Location.X + leftFace.Mouth.Width / 2,
                                             leftFace.Face.Location.Y + leftFace.Mouth.Location.Y + leftFace.Mouth.Height / 2));

                    rightPoints.Add(new Point(rightFace.Face.Location.X + rightFace.Mouth.Location.X + rightFace.Mouth.Width / 2,
                                              rightFace.Face.Location.Y + rightFace.Mouth.Location.Y + rightFace.Mouth.Height / 2));
                }

                #endregion

                var pointCloud = new MCvPoint3D64f[leftPoints.Count];

                #region Calculate Point Cloud

                for (int i = 0; i < leftPoints.Count; i++)
                {
                    var d = rightPoints[i].X - leftPoints[i].X;

                    var X = leftPoints[i].X * Options.StereoCalibrationOptions.Q[0, 0] + Options.StereoCalibrationOptions.Q[0, 3];
                    var Y = leftPoints[i].Y * Options.StereoCalibrationOptions.Q[1, 1] + Options.StereoCalibrationOptions.Q[1, 3];
                    var Z = Options.StereoCalibrationOptions.Q[2, 3];
                    var W = d * Options.StereoCalibrationOptions.Q[3, 2] + Options.StereoCalibrationOptions.Q[3, 3];

                    X = X / W;
                    Y = Y / W;
                    Z = Z / W;

                    leftImageR.Draw(string.Format("{0:0.0} {1:0.0} {2:0.0}", X, Y, Z), ref _font, leftPoints[i], new Gray(255));
                    rightImageR.Draw(string.Format("{0:0.0} {1:0.0} {2:0.0}", X, Y, Z), ref _font, rightPoints[i], new Gray(255));

                    pointCloud[i] = new MCvPoint3D64f(X, Y, Z);
                }

                #endregion

                if (pointCloud.Length >= 4)
                {
                    var srcPoints = new Matrix <float>(pointCloud.Length, 3);
                    var dstPoints = new Matrix <float>(pointCloud.Length, 3);

                    for (int i = 0; i < pointCloud.Length; i++)
                    {
                        srcPoints[i, 0] = (float)pointCloud[i].x;
                        srcPoints[i, 1] = (float)pointCloud[i].y;
                        srcPoints[i, 2] = (float)pointCloud[i].z;

                        dstPoints[i, 0] = (float)pointCloud[i].x;
                        dstPoints[i, 1] = (float)pointCloud[i].y;
                        dstPoints[i, 2] = 0;
                    }

                    var mapMatrix = new Matrix <double>(3, 3);
                    CvInvoke.cvGetPerspectiveTransform(srcPoints.Ptr, dstPoints.Ptr, mapMatrix.Ptr);

                    try
                    {
                        if (_transformed == null)
                        {
                            _transformed = new Image <Gray, byte>(leftImageR.Width, leftImageR.Height);
                        }

                        CvInvoke.cvPerspectiveTransform(leftImageR.Ptr, _transformed.Ptr, mapMatrix.Ptr);

                        //_transformed = leftImageR.WarpAffine(mapMatrix, INTER.CV_INTER_CUBIC, WARP.CV_WARP_DEFAULT, new Gray(0));
                    }
                    catch (Exception)
                    {
                        // ignore transform failures for this frame
                    }

                    //HomographyMatrix homographyMatrix = CameraCalibration.FindHomography(srcPoints, dstPoints, HOMOGRAPHY_METHOD.RANSAC, 2);

                    //_transformed = leftImageR.WarpPerspective(homographyMatrix, INTER.CV_INTER_CUBIC, WARP.CV_WARP_DEFAULT, new Gray(0));
                }
            }



            var oldLeft  = _cameras[0].Image;
            var oldRight = _cameras[1].Image;

            _cameras[0].Image = leftImageR;
            _cameras[1].Image = rightImageR;

            oldLeft.Dispose();
            oldRight.Dispose();
        }
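The per-point math in the "Calculate Point Cloud" loop above is the standard reprojection through the 4x4 stereo rectification matrix Q: a pixel (x, y) with disparity d maps to homogeneous [X, Y, Z, W] = Q * [x, y, d, 1], and the 3D point is (X/W, Y/W, Z/W). A small self-contained sketch of that relation (the helper name is hypothetical); the loop above exploits the usual sparsity of Q, so both forms agree for a typical rectification matrix.

 using Emgu.CV;
 using Emgu.CV.Structure;

 // Full 4x4 reprojection; the loop above uses only the non-zero entries
 // Q[0,0], Q[0,3], Q[1,1], Q[1,3], Q[2,3], Q[3,2] and Q[3,3].
 static MCvPoint3D64f ReprojectPixel(Matrix<double> q, double x, double y, double d)
 {
     double X = q[0, 0] * x + q[0, 1] * y + q[0, 2] * d + q[0, 3];
     double Y = q[1, 0] * x + q[1, 1] * y + q[1, 2] * d + q[1, 3];
     double Z = q[2, 0] * x + q[2, 1] * y + q[2, 2] * d + q[2, 3];
     double W = q[3, 0] * x + q[3, 1] * y + q[3, 2] * d + q[3, 3];
     return new MCvPoint3D64f(X / W, Y / W, Z / W);
 }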
 private extern static void transformECEF2Geodetic(ref MCvPoint3D64f ecef, ref GeodeticCoordinate coordinate);
Example #25
 internal extern static void quaternionsRotatePoint(ref Quaternions quaternions, ref MCvPoint3D64f point, ref MCvPoint3D64f pointDst);
        /// <summary>
        /// Convert <paramref name="enu"/> to Geodetic coordinate using the reference coordinate <paramref name="refCoor"/>
        /// </summary>
        /// <param name="enu">The ENU (East North UP) coordinate to be converted</param>
        /// <param name="refCoor">The reference Geodetic coordinate</param>
        /// <param name="refEcef"><paramref name="refCoor"/> in ECEF format. If this is provided, it speeds up the computation</param>
        /// <returns>The Geodetic coordinate</returns>
        public static GeodeticCoordinate ENU2Geodetic(MCvPoint3D64f enu, GeodeticCoordinate refCoor, MCvPoint3D64f refEcef)
        {
#if PINVOKE
            GeodeticCoordinate coor = new GeodeticCoordinate();
            transformENU2Geodetic(ref enu, ref refCoor, ref refEcef, ref coor);
            return(coor);
#else
            double sinPhi    = Math.Sin(refCoor.Latitude);
            double cosPhi    = Math.Cos(refCoor.Latitude);
            double sinLambda = Math.Sin(refCoor.Longitude);
            double cosLambda = Math.Cos(refCoor.Longitude);

            double        sinPhi_EnuY = sinPhi * enu.y;
            double        cosPhi_EnuZ = cosPhi * enu.z;
            MCvPoint3D64f ecefDelta   = new MCvPoint3D64f(
                -sinLambda * enu.x - cosLambda * sinPhi_EnuY + cosLambda * cosPhi_EnuZ,
                cosLambda * enu.x - sinLambda * sinPhi_EnuY + sinLambda * cosPhi_EnuZ,
                cosPhi * enu.y + sinPhi * enu.z);

            return(ECEF2Geodetic(ecefDelta + refEcef));
#endif
        }
Example #27
 internal static extern IntPtr cveWArrowCreate(ref MCvPoint3D64f pt1, ref MCvPoint3D64f pt2, double thickness, ref MCvScalar color, ref IntPtr widget3d, ref IntPtr widget);
 private extern static void transformGeodetic2ENU(ref GeodeticCoordinate coor, ref GeodeticCoordinate refCoor, ref MCvPoint3D64f refEcef, ref MCvPoint3D64f enu);
Example #29
 internal extern static void axisAngleToQuaternions(ref MCvPoint3D64f axisAngle, ref Quaternions q);
Example #30
        /// <summary>
        /// Convert <paramref name="ned"/> to Geodetic coordinate using the reference coordinate <paramref name="refCoor"/>
        /// </summary>
        /// <param name="ned">The NED (North East Down) coordinate to be converted</param>
        /// <param name="refCoor">The reference Geodetic coordinate</param>
        /// <param name="refEcef"><paramref name="refCoor"/> in ECEF format. If this is provided, it speeds up the computation</param>
        /// <returns>The Geodetic coordinate</returns>
        public GeodeticCoordinate NED2Geodetic(MCvPoint3D64f ned, GeodeticCoordinate refCoor, MCvPoint3D64f refEcef)
        {
            GeodeticCoordinate coor = new GeodeticCoordinate();

            transformNED2Geodetic(_ptr, ref ned, ref refCoor, ref refEcef, ref coor);
            return(coor);
        }