Example #1
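 // Rebuilds the local orthonormal frame from the axis end points:
 // VX = end of X axis - origin, VY = end of Y axis - origin, VZ = VX x VY,
 // then VY = VZ x VX so the three vectors are mutually perpendicular.
 // Bails out early if any intermediate vector has (near) zero length.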
 public void SetZNormal()
 {
     VX = myEndXAxis - myOrigin;
     if (IsSameDbl(VX.Length, 0))
     {
         return;
     }
     VX.Normalise();
     VY = myEndYAxis - myOrigin;
     if (IsSameDbl(VY.Length, 0))
     {
         return;
     }
     VY.Normalise();
     VZ = VX.Cross(VY);
     if (IsSameDbl(VZ.Length, 0))
     {
         return;
     }
     VZ.Normalise();
     VY = VZ.Cross(VX);
     if (IsSameDbl(VY.Length, 0))
     {
         return;
     }
     VY.Normalise();
 }
Example #2
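        // Reads a CSV file in which each line is "x,y" and appends the values to the
        // VX and VY sample lists. Note that double.Parse uses the current culture;
        // data written with '.' as the decimal separator may need CultureInfo.InvariantCulture.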
        public void LoadData(string fileName)
        {
            var buffer = System.IO.File.ReadAllLines(fileName);

            foreach (var item in buffer)
            {
                var temp = item.Split(',');
                VX.Add(double.Parse(temp[0]));
                VY.Add(double.Parse(temp[1]));
            }
        }
Example #3
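        // Form-closing handler: queries the performance counters of both graphs
        // and releases the OpenVX context created in SampleForm_Load.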
        private void SampleForm_FormClosing(object sender, FormClosingEventArgs e)
        {
            // Query graph performance using the Performance graph attribute.
            // Note that the timing fields of vx_perf_t are in nanoseconds
            // (divide by 1e6 to report milliseconds).
            Perf perfHarris = new Perf(), perfTrack = new Perf();

            VX.Query(_GraphHarris, GraphAttribute.Performance, out perfHarris);
            VX.Query(_GraphTrack, GraphAttribute.Performance, out perfTrack);

            // Release all the OpenVX objects created in this exercise, releasing the context last.
            // To release an OpenVX object, call the corresponding vxRelease<Object> API, which takes
            // a pointer to the object. On success, the OpenVX framework resets the reference to NULL.

            VX.Release(ref _Context);
        }
Example #4
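        // Derives the marker's origin from the collected point lists (ProcessPointListPair)
        // and the X/Y axes from averaged point pairs (AverageAxis), then calls SetZNormal()
        // to orthonormalize VX/VY/VZ and SetEndPoint(). On degenerate input, jumps to
        // quitOut and resets the outputs to empty points.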
        public void SetEndPointBasedOnZVectors()
        {
            int n = 0;

            if (myPts1.Count == 0)
            {
                return;
            }

            //Let's try without this for a while:
            //CompactMarkers()

            myOrigin = ProcessPointListPair(myPts1, mySeenFromCameraPoints, ref n);
            if (myOrigin == null || myOrigin.Length < myTol || n == 0)
            {
                goto quitOut;
            }

            VX = AverageAxis(myPts1, myPts2);
            if (VX == null || VX.Length < myTol)
            {
                goto quitOut;
            }
            VX.Normalise();
            myEndXAxis = myOrigin + VX;

            VY = AverageAxis(myPts1, myPts3);
            if (VY == null || VY.Length < myTol)
            {
                goto quitOut;
            }
            VY.Normalise();
            myEndYAxis = myOrigin + VY;

            SetZNormal();
            myEndXAxis = myOrigin + VX;
            myEndYAxis = myOrigin + VY;

            SetEndPoint();

            return;

quitOut:
            myOrigin   = new clsPoint3d();
            myEndXAxis = new clsPoint3d();
            myEndYAxis = new clsPoint3d();
            myPoint    = new clsPoint3d();
        }
Example #5
        /// <summary>
        /// Fits the linear model Y = B1*X + B0 to the samples in VX and VY
        /// using ordinary least squares.
        /// </summary>
        public void Solve(out double B0, out double B1)
        {
            double xbar = VX.Sum() / VX.Count;
            double ybar = VY.Sum() / VY.Count;
            double sumX2 = VX.Sum(t => t * t);
            double xxbar = 0, yybar = 0, xybar = 0;

            for (int i = 0; i < VX.Count; i++)
            {
                xxbar += (VX[i] - xbar) * (VX[i] - xbar);
                yybar += (VY[i] - ybar) * (VY[i] - ybar);
                xybar += (VX[i] - xbar) * (VY[i] - ybar);
            }
            B1 = xybar / xxbar;
            B0 = ybar - B1 * xbar;
        }
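The closed-form fit above assumes VX and VY have the same non-zero length and that the X values are not all equal (otherwise xxbar is zero and B1 divides by zero). A minimal, self-contained sketch of the same least-squares computation on made-up data, just to illustrate the formula (the variable names mirror the example, but nothing below comes from the original class):

        // Requires: using System.Collections.Generic; using System.Linq;
        var VX = new List<double> { 1, 2, 3, 4 };
        var VY = new List<double> { 3, 5, 7, 9 };   // exactly Y = 2*X + 1

        double xbar = VX.Average(), ybar = VY.Average();
        double xxbar = 0, xybar = 0;
        for (int i = 0; i < VX.Count; i++)
        {
            xxbar += (VX[i] - xbar) * (VX[i] - xbar);
            xybar += (VX[i] - xbar) * (VY[i] - ybar);
        }
        double B1 = xybar / xxbar;      // -> 2
        double B0 = ybar - B1 * xbar;   // -> 1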
Example #6
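        // Validates that the VX and VY velocity arrays match the NX-by-NY grid
        // (the dimensions of T), then runs the remaining consistency checks
        // (positive T, valid velocity, MaxCFL > 0).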
        private void AssertValidInput()
        {
            if (VX.GetLength(0) != NX ||
                VX.GetLength(1) != NY)
            {
                throw new Exception("Dimensions of VX are not compatible with T.");
            }

            if (VY.GetLength(0) != NX ||
                VY.GetLength(1) != NY)
            {
                throw new Exception("Dimensions of VY are not compatible with T.");
            }

            AssertPositiveT();
            AssertValidVelocity();

            if (MaxCFL <= 0)
            {
                throw new Exception("MaxCFL <= 0.");
            }
        }
Example #7
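        // Serializes n followed by the m, x, y, z, vx, vy and vz collections
        // as a single space-separated line.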
        public override string ToString()
        {
            var stroka = new System.Text.StringBuilder();

            stroka.Append(n).Append(' ');
            foreach (double M in m)
            {
                stroka.Append(M).Append(' ');
            }
            foreach (double X in x)
            {
                stroka.Append(X).Append(' ');
            }
            foreach (double Y in y)
            {
                stroka.Append(Y).Append(' ');
            }
            foreach (double Z in z)
            {
                stroka.Append(Z).Append(' ');
            }
            foreach (double VX in vx)
            {
                stroka.Append(VX).Append(' ');
            }
            foreach (double VY in vy)
            {
                stroka.Append(VY).Append(' ');
            }
            foreach (double VZ in vz)
            {
                stroka.Append(VZ).Append(' ');
            }
            stroka.Append('\n');

            return stroka.ToString();
        }
Example #8
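        // Builds the OpenVX corner-tracking pipeline: a Harris graph that computes the
        // initial keypoints and pyramid from the input RGB image, and a tracking graph
        // that propagates the keypoints between frames with pyramidal LK optical flow.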
        private void SampleForm_Load(object sender, EventArgs e)
        {
            #region Corner Tracking Initialization

            uint                width = 1024, height = 1024;
            UIntPtr             max_keypoint_count      = (UIntPtr)10000;           // maximum number of keypoints to track
            float               harris_strength_thresh  = 0.0005f;                  // minimum corner strength to keep a corner
            float               harris_min_distance     = 5.0f;                     // radial L2 distance for non-max suppression
            float               harris_sensitivity      = 0.04f;                    // multiplier k in det(A) - k * trace(A)^2
            int                 harris_gradient_size    = 3;                        // window size for gradient computation
            int                 harris_block_size       = 3;                        // block window size for Harris corner score
            UIntPtr             lk_pyramid_levels       = (UIntPtr)6;               // number of pyramid levels for optical flow
            float               lk_pyramid_scale        = VX.SCALE_PYRAMID_HALF;    // pyramid levels scale by factor of two
            TerminationCriteria lk_termination          = TerminationCriteria.Both; // iteration termination criteria (eps & iterations)
            float               lk_epsilon              = 0.01f;                    // convergence criterion
            uint                lk_num_iterations       = 5;                        // maximum number of iterations
            bool                lk_use_initial_estimate = false;                    // don't use initial estimate
            uint                lk_window_dimension     = 6;                        // window size for evaluation
            float               trackable_kp_ratio_thr  = 0.8f;                     // threshold for the ratio of tracked keypoints to all keypoints

            // Create the OpenVX context and make sure the returned context is valid.
            _Context = VX.CreateContext();

            // Create OpenVX image object for input RGB image.
            _ImageInput = VX.CreateImage(_Context, width, height, DfImage.Rgb);

            // OpenVX optical flow functionality requires image pyramids for the current
            // and the previous image. It also requires keypoints that correspond
            // to the previous pyramid and will output updated keypoints into
            // another keypoint array. To be able to toggle between the current and
            // the previous buffers, you need to use OpenVX delay objects and vxAgeDelay().
            // Create OpenVX pyramid and array object exemplars and create OpenVX delay
            // objects for both to hold two of each. Note that the exemplar objects are not
            // needed once the delay objects are created.
            using (Pyramid pyramid = VX.CreatePyramid(_Context, lk_pyramid_levels, lk_pyramid_scale, width, height, DfImage.U8))
                _PyramidDelay = VX.CreateDelay(_Context, pyramid, (UIntPtr)2);

            using (OpenVX.Array keypoints = VX.CreateArray(_Context, OpenVX.Type.Keypoint, max_keypoint_count))
                _KeypointsDelay = VX.CreateDelay(_Context, keypoints, (UIntPtr)2);


            // An object from a delay slot can be accessed using vxGetReferenceFromDelay API.
            // You need to use index = 0 for the current object and index = -1 for the previous object.

            _PyramidCurrent    = VX.GetReferenceFromDelay(_PyramidDelay, 0);
            _PyramidPrevious   = VX.GetReferenceFromDelay(_PyramidDelay, -1);
            _KeypointsCurrent  = VX.GetReferenceFromDelay(_KeypointsDelay, 0);
            _KeypointsPrevious = VX.GetReferenceFromDelay(_KeypointsDelay, -1);

            // The Harris and optical flow algorithms require their own graph objects.
            // The Harris graph needs to extract a gray-scale image from the input RGB image,
            // compute an initial set of keypoints, and compute an initial pyramid for use
            // by the optical flow graph.
            Graph graphHarris = VX.CreateGraph(_Context);
            Graph graphTrack  = VX.CreateGraph(_Context);

            // Harris and pyramid computation expect an 8-bit input image.
            // Given that the input is an RGB image, it is best to extract a gray image
            // from the RGB image, which requires two steps:
            //   - perform RGB to IYUV color conversion
            //   - extract the Y channel from the IYUV image
            // This requires two intermediate OpenVX image objects. Since you don't
            // need to access these objects from the application, they can be virtual
            // objects created using the vxCreateVirtualImage API.
            OpenVX.Image harris_yuv_image       = VX.CreateVirtualImage(graphHarris, width, height, DfImage.Iyuv);
            OpenVX.Image harris_gray_image      = VX.CreateVirtualImage(graphHarris, width, height, DfImage.U8);
            OpenVX.Image opticalflow_yuv_image  = VX.CreateVirtualImage(graphTrack, width, height, DfImage.Iyuv);
            OpenVX.Image opticalflow_gray_image = VX.CreateVirtualImage(graphTrack, width, height, DfImage.U8);

            // The Harris corner detector and optical flow nodes (see "VX/vx_nodes.h")
            // need several scalar objects as parameters.
            Scalar strength_thresh      = VX.CreateScalar(_Context, ref harris_strength_thresh);
            Scalar min_distance         = VX.CreateScalar(_Context, ref harris_min_distance);
            Scalar sensitivity          = VX.CreateScalar(_Context, ref harris_sensitivity);
            Scalar epsilon              = VX.CreateScalar(_Context, ref lk_epsilon);
            Scalar num_iterations       = VX.CreateScalar(_Context, ref lk_num_iterations);
            Scalar use_initial_estimate = VX.CreateScalar(_Context, ref lk_use_initial_estimate);

            // Now all the objects have been created for building the graphs.
            // First, build a graph that performs Harris corner detection and initial pyramid computation.
            // See "VX/vx_nodes.h" for APIs how to add nodes into a graph.
            Node[] nodesHarris = new Node[] {
                VX.ColorConvertNode(graphHarris, _ImageInput, harris_yuv_image),
                VX.ChannelExtractNode(graphHarris, harris_yuv_image, Channel.ChannelY, harris_gray_image),
                VX.GaussianPyramidNode(graphHarris, harris_gray_image, _PyramidCurrent),
                VX.HarrisCornersNode(graphHarris, harris_gray_image, strength_thresh, min_distance, sensitivity, harris_gradient_size, harris_block_size, _KeypointsCurrent, Reference.Null)
            };
            VX.Release(nodesHarris);

            VX.VerifyGraph(graphHarris);

            // Now, build a graph that computes image pyramid for the next frame,
            // and tracks features using optical flow.
            Node[] nodesTrack = new Node[] {
                VX.ColorConvertNode(graphTrack, _ImageInput, opticalflow_yuv_image),
                VX.ChannelExtractNode(graphTrack, opticalflow_yuv_image, Channel.ChannelY, opticalflow_gray_image),
                VX.GaussianPyramidNode(graphTrack, opticalflow_gray_image, _PyramidCurrent),
                VX.OpticalFlowPyrLKNode(graphTrack, _PyramidPrevious, _PyramidCurrent, _KeypointsPrevious, _KeypointsPrevious, _KeypointsCurrent,
                                        lk_termination, epsilon, num_iterations,
                                        use_initial_estimate, (UIntPtr)lk_window_dimension
                                        )
            };
            VX.Release(nodesTrack);

            VX.VerifyGraph(graphTrack);

            _GraphHarris = graphHarris;
            _GraphTrack  = graphTrack;

            #endregion
        }
Example #9
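        // Per-frame render callback: draws the animated quad into the framebuffer, copies
        // the rendered RGB image into OpenVX, runs the Harris or tracking graph, gathers
        // the tracked keypoints into a GL buffer, draws the frame plus markers, and ages
        // the delay objects so the current pyramid/keypoints become the previous ones.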
        private void VisionControl_Render(object sender, OpenGL.GlControlEventArgs e)
        {
            #region Draw Basic Picture

            // Update image input
            _Framebuffer.BindDraw(_GraphicsContext);
            Gl.Viewport(0, 0, (int)_Framebuffer.Width, (int)_Framebuffer.Height);
            _Framebuffer.Clear(_GraphicsContext, ClearBufferMask.ColorBufferBit);
            {                   // Draw a quad
                Matrix4x4f quadProj  = Matrix4x4f.Ortho2D(-1.0f, +1.0f, -1.0f, +1.0f);
                Matrix4x4f quadModel = new Matrix4x4f();

                _Angle += 1.0f;

                quadModel.RotateZ(10.0f * (float)Math.Cos(Angle.ToRadians(_Angle)));

                _GraphicsContext.Bind(_ProgramStd);
                _ProgramStd.SetUniform(_GraphicsContext, "glo_ModelViewProjection", quadProj * quadModel);
                _ProgramStd.SetUniform(_GraphicsContext, "glo_UniformColor", Vertex4f.One);

                _ArraysQuad.Draw(_GraphicsContext, _ProgramStd);
            }
            _Framebuffer.UnbindDraw(_GraphicsContext);

            #endregion

            #region Track Corners

            // Read back image input pixels
            using (OpenGL.Objects.Image imageInput = _FramebufferTexture.Get(_GraphicsContext, PixelLayout.RGB24, 0)) {
                // Copy the input RGB frame from OpenGL to OpenVX
                Rectangle cv_rgb_image_region = new Rectangle();
                cv_rgb_image_region.StartX = 0;
                cv_rgb_image_region.StartY = 0;
                cv_rgb_image_region.EndX   = imageInput.Width;
                cv_rgb_image_region.EndY   = imageInput.Height;

                ImagePatchAddressing cv_rgb_image_layout = new ImagePatchAddressing();
                cv_rgb_image_layout.StrideX = 3;
                cv_rgb_image_layout.StrideY = (int)imageInput.Stride;

                VX.CopyImagePatch(_ImageInput, ref cv_rgb_image_region, 0, ref cv_rgb_image_layout, imageInput.ImageBuffer, Accessor.WriteOnly, MemoryType.Host);
            }

            // Now that the input RGB image is ready, just run a graph.
            // Run the Harris graph on the first frame to initialize the previous keypoints;
            // on subsequent frames, run the tracking graph.
            VX.ProcessGraph(_DetectCorners ? _GraphHarris : _GraphTrack);

            _DetectCorners = false;

            #endregion

            #region Store Markers on GPU

            // To mark the keypoints in the display, access the output keypoint array
            // and store each tracked old/new position pair in a GL buffer; the markers
            // are drawn in the "Draw Markers" region below.
            UIntPtr num_corners  = UIntPtr.Zero;
            uint    num_tracking = 0;

            _KeypointsPrevious = VX.GetReferenceFromDelay(_KeypointsDelay, -1);
            _KeypointsCurrent  = VX.GetReferenceFromDelay(_KeypointsDelay, 0);

            VX.Query(_KeypointsPrevious, ArrayAttribute.Numitems, out num_corners);
            if (num_corners.ToUInt64() > 0)
            {
                UIntPtr kp_old_stride = UIntPtr.Zero, kp_new_stride = UIntPtr.Zero;
                MapId   kp_old_map = new MapId(), kp_new_map = new MapId();
                IntPtr  kp_old_buf, kp_new_buf;

                VX.MapArrayRange(_KeypointsPrevious, (UIntPtr)0, num_corners, ref kp_old_map, ref kp_old_stride, out kp_old_buf, Accessor.ReadOnly, MemoryType.Host, 0);
                VX.MapArrayRange(_KeypointsCurrent, (UIntPtr)0, num_corners, ref kp_new_map, ref kp_new_stride, out kp_new_buf, Accessor.ReadOnly, MemoryType.Host, 0);

                _BufferOpticalMarkers.Map(_GraphicsContext, BufferAccess.WriteOnly);

                for (uint i = 0; i < num_corners.ToUInt64(); i++)
                {
                    KeyPoint kp_old = VX.ArrayItem <KeyPoint>(kp_old_buf, i, kp_old_stride);
                    KeyPoint kp_new = VX.ArrayItem <KeyPoint>(kp_new_buf, i, kp_new_stride);

                    if (kp_new.TrackingStatus != 0)
                    {
                        Vertex2f vOld = new Vertex2f(kp_old.X / 1024.0f, kp_old.Y / 1024.0f);
                        Vertex2f vNew = new Vertex2f(kp_new.X / 1024.0f, kp_new.Y / 1024.0f);

                        _BufferOpticalMarkers.SetElement(vOld, (num_tracking * 2) + 0, 0);
                        _BufferOpticalMarkers.SetElement(vNew, (num_tracking * 2) + 1, 0);

                        num_tracking++;
                    }
                }

                _BufferOpticalMarkers.Unmap(_GraphicsContext);

                VX.UnmapArrayRange(_KeypointsPrevious, kp_old_map);
                VX.UnmapArrayRange(_KeypointsCurrent, kp_new_map);
            }

            #endregion

            Gl.Viewport(0, 0, VisionControl.Width, VisionControl.Height);
            Gl.ClearColor(1.0f, 0.0f, 0.0f, 0.0f);
            Gl.Clear(ClearBufferMask.ColorBufferBit);

            #region Draw Input Image

            _GraphicsContext.Bind(_ProgramStdTex);
            _ProgramStdTex.SetUniform(_GraphicsContext, "glo_ModelViewProjection", Matrix4x4f.Ortho2D(0.0f, 1.0f, 0.0f, 1.0f));
            _ProgramStdTex.SetUniform(_GraphicsContext, "glo_Texture", _FramebufferTexture);

            _ArraysPostQuad.Draw(_GraphicsContext, _ProgramStdTex);

            #endregion

            #region Draw Markers

            if (num_tracking > 0)
            {
                _GraphicsContext.Bind(_ProgramStd);
                _ProgramStd.SetUniform(_GraphicsContext, "glo_ModelViewProjection", Matrix4x4f.Ortho2D(0.0f, 1.0f, 0.0f, 1.0f));
                _ProgramStd.SetUniform(_GraphicsContext, "glo_UniformColor", new Vertex4f(1.0f, 0.0f, 0.0f, 1.0f));

                _ArraysOpticalMarkers.Draw(_GraphicsContext, _ProgramStd, 0, 0, num_tracking * 2);
            }

            #endregion

            // Increase the age of the delay objects to make the current entry become the previous entry
            VX.AgeDelay(_PyramidDelay);
            VX.AgeDelay(_KeypointsDelay);
        }
Example #10
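        // Loads marker settings and point lists from the stream, in the format written
        // by Save(): a key,value settings block terminated by END_MARKER_POINT_SETTINGS,
        // the frame points (origin, axis end points, VX/VY/VZ), then the count-prefixed
        // point lists (camera points, Pts1-3, gyro/accel data and history).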
        public void Load(System.IO.StreamReader sr)
        {
            clsPoint3d p1;

            //var version = myPGLoadedVersion.Split('.');
            //var nLoadedVersionMajor = version.Count() >= 1 ? Convert.ToInt32(version[0]) : 0;
            //var nLoadedVersionMinor = version.Count() >= 2 ? Convert.ToInt32(version[1]) : 0;

            while (sr.EndOfStream == false)
            {
                var myLine = sr.ReadLine();
                if (myLine == "END_MARKER_POINT_SETTINGS")
                {
                    break;
                }
                if (myLine.IndexOf(",") > -1)
                {
                    var mySplit = myLine.Split(',');
                    if (mySplit.GetUpperBound(0) == 1)
                    {
                        if (mySplit[0] == "MarkerID")
                        {
                            myMarkerID = Convert.ToInt32(mySplit[1]);
                        }
                        if (mySplit[0] == "SeenFromMarkerID")
                        {
                            mySeenFromMarkerID = Convert.ToInt32(mySplit[1]);
                        }
                        if (mySplit[0] == "SeenFromMarkerIDs")
                        {
                            mySeenFromMarkerIDs.Add(Convert.ToInt32(mySplit[1]));
                        }
                        if (mySplit[0] == "ActualMarkerID")
                        {
                            myActualMarkerID = Convert.ToInt32(mySplit[1]);
                        }
                        if (mySplit[0] == "VerticalVectorX")
                        {
                            if (myVerticalVect == null)
                            {
                                myVerticalVect = new clsPoint3d();
                            }
                            myVerticalVect.X = Convert.ToDouble(mySplit[1]);
                        }
                        if (mySplit[0] == "VerticalVectorY")
                        {
                            if (myVerticalVect == null)
                            {
                                myVerticalVect = new clsPoint3d();
                            }
                            myVerticalVect.Y = Convert.ToDouble(mySplit[1]);
                        }
                        if (mySplit[0] == "VerticalVectorZ")
                        {
                            if (myVerticalVect == null)
                            {
                                myVerticalVect = new clsPoint3d();
                            }
                            myVerticalVect.Z = Convert.ToDouble(mySplit[1]);
                        }
                        if (mySplit[0] == "BulkheadHeight")
                        {
                            BulkheadHeight = Convert.ToDouble(mySplit[1]);
                        }
                        if (mySplit[0] == "Confirmed")
                        {
                            _confirmed = (mySplit[1] == "1");
                        }
                    }
                }
            }

            if (mySeenFromMarkerIDs.Contains(mySeenFromMarkerID) == false)
            {
                mySeenFromMarkerIDs.Add(mySeenFromMarkerID);
            }

            myOrigin.Load(sr);
            myEndXAxis.Load(sr);
            myEndYAxis.Load(sr);
            myPoint.Load(sr);
            VX.Load(sr);
            VY.Load(sr);
            VZ.Load(sr);

            var n = Convert.ToInt32(sr.ReadLine());

            for (var i = 1; i <= n; i++)
            {
                p1 = new clsPoint3d();
                p1.Load(sr);
                myCameraPoints.Add(p1);
            }

            n = Convert.ToInt32(sr.ReadLine());
            for (var i = 1; i <= n; i++)
            {
                p1 = new clsPoint3d();
                p1.Load(sr);
                mySeenFromCameraPoints.Add(p1);
            }

            n = Convert.ToInt32(sr.ReadLine());
            for (var i = 1; i <= n; i++)
            {
                p1 = new clsPoint3d();
                p1.Load(sr);
                myPts1.Add(p1);
            }

            n = Convert.ToInt32(sr.ReadLine());
            for (var i = 1; i <= n; i++)
            {
                p1 = new clsPoint3d();
                p1.Load(sr);
                myPts2.Add(p1);
            }

            n = Convert.ToInt32(sr.ReadLine());
            for (var i = 1; i <= n; i++)
            {
                p1 = new clsPoint3d();
                p1.Load(sr);
                myPts3.Add(p1);
            }

            n = Convert.ToInt32(sr.ReadLine());
            for (var i = 1; i <= n; i++)
            {
                p1 = new clsPoint3d();
                p1.Load(sr);
                GyroData.Add(p1);
            }

            n = Convert.ToInt32(sr.ReadLine());
            for (var i = 1; i <= n; i++)
            {
                p1 = new clsPoint3d();
                p1.Load(sr);
                LastGyroData.Add(p1);
            }

            n = Convert.ToInt32(sr.ReadLine());
            for (var i = 1; i <= n; i++)
            {
                p1 = new clsPoint3d();
                p1.Load(sr);
                AccelData.Add(p1);
            }

            n = Convert.ToInt32(sr.ReadLine());
            for (var i = 1; i <= n; i++)
            {
                p1 = new clsPoint3d();
                p1.Load(sr);
                LastAccelData.Add(p1);
            }

            n = Convert.ToInt32(sr.ReadLine());
            for (var i = 1; i <= n; i++)
            {
                var myHistoricPoint = new clsMarkerPoint();
                myHistoricPoint.Load(sr);
                myHistory.Add(myHistoricPoint);
            }
        }
Example #11
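        // Saves the marker in the format read back by Load(): a key,value settings block,
        // the frame points (origin, axis end points, VX/VY/VZ), then each point list
        // prefixed by its count.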
        public void Save(System.IO.StreamWriter sw)
        {
            sw.WriteLine("MARKER_POINT_SETTINGS");
            sw.WriteLine("MarkerID," + myMarkerID.ToString());
            sw.WriteLine("SeenFromMarkerID," + mySeenFromMarkerID.ToString());
            sw.WriteLine("ActualMarkerID," + myActualMarkerID.ToString());
            if (myVerticalVect != null)
            {
                sw.WriteLine("VerticalVectorX," + myVerticalVect.X.ToString());
                sw.WriteLine("VerticalVectorY," + myVerticalVect.Y.ToString());
                sw.WriteLine("VerticalVectorZ," + myVerticalVect.Z.ToString());
            }
            sw.WriteLine("BulkheadHeight," + BulkheadHeight.ToString());
            foreach (int myID in mySeenFromMarkerIDs)
            {
                sw.WriteLine("SeenFromMarkerIDs," + myID.ToString());
            }
            sw.WriteLine("Confirmed," + (_confirmed ? "1" : "0"));
            sw.WriteLine("END_MARKER_POINT_SETTINGS");

            myOrigin.Save(sw);
            myEndXAxis.Save(sw);
            myEndYAxis.Save(sw);
            myPoint.Save(sw);
            VX.Save(sw);
            VY.Save(sw);
            VZ.Save(sw);

            sw.WriteLine(myCameraPoints.Count);
            foreach (clsPoint3d p1 in myCameraPoints)
            {
                p1.Save(sw);
            }

            sw.WriteLine(mySeenFromCameraPoints.Count);
            foreach (clsPoint3d p1 in mySeenFromCameraPoints)
            {
                p1.Save(sw);
            }

            sw.WriteLine(myPts1.Count);
            foreach (clsPoint3d p1 in myPts1)
            {
                p1.Save(sw);
            }

            sw.WriteLine(myPts2.Count);
            foreach (clsPoint3d p1 in myPts2)
            {
                p1.Save(sw);
            }

            sw.WriteLine(myPts3.Count);
            foreach (clsPoint3d p1 in myPts3)
            {
                p1.Save(sw);
            }

            sw.WriteLine(GyroData.Count);
            for (int i = 0; i < GyroData.Count; i++)
            {
                GyroData[i].Save(sw);
            }
            sw.WriteLine(LastGyroData.Count);
            for (int i = 0; i < LastGyroData.Count; i++)
            {
                LastGyroData[i].Save(sw);
            }
            sw.WriteLine(AccelData.Count);
            for (int i = 0; i < AccelData.Count; i++)
            {
                AccelData[i].Save(sw);
            }
            sw.WriteLine(LastAccelData.Count);
            for (int i = 0; i < LastAccelData.Count; i++)
            {
                LastAccelData[i].Save(sw);
            }

            sw.WriteLine(myHistory.Count);
            foreach (clsMarkerPoint myHistoricPoint in myHistory)
            {
                myHistoricPoint.Save(sw);
            }
        }