Example #1
        /// <summary>
        /// Form-load handler that builds the OpenVX corner-tracking pipeline.
        /// Creates the OpenVX context and the RGB input image, delay objects
        /// holding two image pyramids and two keypoint arrays (current/previous
        /// slots), and two graphs: one that detects Harris corners (run on the
        /// first frame to seed the keypoints) and one that tracks those corners
        /// with pyramidal Lucas-Kanade optical flow. Both graphs are verified
        /// here and stored in _GraphHarris/_GraphTrack for the render handler.
        /// </summary>
        /// <param name="sender">Standard event source (unused).</param>
        /// <param name="e">Standard event arguments (unused).</param>
        private void SampleForm_Load(object sender, EventArgs e)
        {
            #region Corner Tracking Initialization

            uint                width = 1024, height = 1024;
            UIntPtr             max_keypoint_count      = (UIntPtr)10000;           // maximum number of keypoints to track
            float               harris_strength_thresh  = 0.0005f;                  // minimum corner strength to keep a corner
            float               harris_min_distance     = 5.0f;                     // radial L2 distance for non-max suppression
            float               harris_sensitivity      = 0.04f;                    // multiplier k in det(A) - k * trace(A)^2
            int                 harris_gradient_size    = 3;                        // window size for gradient computation
            int                 harris_block_size       = 3;                        // block window size for Harris corner score
            UIntPtr             lk_pyramid_levels       = (UIntPtr)6;               // number of pyramid levels for optical flow
            float               lk_pyramid_scale        = VX.SCALE_PYRAMID_HALF;    // pyramid levels scale by factor of two
            TerminationCriteria lk_termination          = TerminationCriteria.Both; // iteration termination criteria (eps & iterations)
            float               lk_epsilon              = 0.01f;                    // convergence criterion
            uint                lk_num_iterations       = 5;                        // maximum number of iterations
            bool                lk_use_initial_estimate = false;                    // don't use initial estimate
            uint                lk_window_dimension     = 6;                        // window size for evaluation
            // NOTE(review): the original also declared 'trackable_kp_ratio_thr'
            // (threshold on the ratio of tracked keypoints); it was never read
            // anywhere in this method, so the dead local has been removed.

            // Create the OpenVX context and make sure the returned context is valid.
            _Context = VX.CreateContext();

            // Create OpenVX image object for input RGB image.
            _ImageInput = VX.CreateImage(_Context, width, height, DfImage.Rgb);

            // OpenVX optical flow functionality requires image pyramids for the current
            // and the previous image. It also requires keypoints that correspond
            // to the previous pyramid and will output updated keypoints into
            // another keypoint array. To be able to toggle between the current and
            // the previous buffers, you need to use OpenVX delay objects and vxAgeDelay().
            // Create OpenVX pyramid and array object exemplars and create OpenVX delay
            // objects for both to hold two of each. Note that the exemplar objects are not
            // needed once the delay objects are created, hence the 'using' blocks.
            using (Pyramid pyramid = VX.CreatePyramid(_Context, lk_pyramid_levels, lk_pyramid_scale, width, height, DfImage.U8))
                _PyramidDelay = VX.CreateDelay(_Context, pyramid, (UIntPtr)2);

            using (OpenVX.Array keypoints = VX.CreateArray(_Context, OpenVX.Type.Keypoint, max_keypoint_count))
                _KeypointsDelay = VX.CreateDelay(_Context, keypoints, (UIntPtr)2);

            // An object from a delay slot can be accessed using vxGetReferenceFromDelay API.
            // You need to use index = 0 for the current object and index = -1 for the previous object.
            _PyramidCurrent    = VX.GetReferenceFromDelay(_PyramidDelay, 0);
            _PyramidPrevious   = VX.GetReferenceFromDelay(_PyramidDelay, -1);
            _KeypointsCurrent  = VX.GetReferenceFromDelay(_KeypointsDelay, 0);
            _KeypointsPrevious = VX.GetReferenceFromDelay(_KeypointsDelay, -1);

            // Harris and optical flow algorithms require their own graph objects.
            // The Harris graph needs to extract gray scale image out of input RGB,
            // compute an initial set of keypoints, and compute an initial pyramid for use
            // by the optical flow graph.
            Graph graphHarris = VX.CreateGraph(_Context);
            Graph graphTrack  = VX.CreateGraph(_Context);

            // Harris and pyramid computation expect input to be an 8-bit image.
            // Given that input is an RGB image, it is best to extract a gray image
            // from RGB image, which requires two steps:
            //   - perform RGB to IYUV color conversion
            //   - extract Y channel from IYUV image
            // This requires two intermediate OpenVX image objects. Since you don't
            // need to access these objects from the application, they can be virtual
            // objects that can be created using the vxCreateVirtualImage API.
            OpenVX.Image harris_yuv_image       = VX.CreateVirtualImage(graphHarris, width, height, DfImage.Iyuv);
            OpenVX.Image harris_gray_image      = VX.CreateVirtualImage(graphHarris, width, height, DfImage.U8);
            OpenVX.Image opticalflow_yuv_image  = VX.CreateVirtualImage(graphTrack, width, height, DfImage.Iyuv);
            OpenVX.Image opticalflow_gray_image = VX.CreateVirtualImage(graphTrack, width, height, DfImage.U8);

            // The Harris corner detector and optical flow nodes (see "VX/vx_nodes.h")
            // need several scalar objects as parameters.
            Scalar strength_thresh      = VX.CreateScalar(_Context, ref harris_strength_thresh);
            Scalar min_distance         = VX.CreateScalar(_Context, ref harris_min_distance);
            Scalar sensitivity          = VX.CreateScalar(_Context, ref harris_sensitivity);
            Scalar epsilon              = VX.CreateScalar(_Context, ref lk_epsilon);
            Scalar num_iterations       = VX.CreateScalar(_Context, ref lk_num_iterations);
            Scalar use_initial_estimate = VX.CreateScalar(_Context, ref lk_use_initial_estimate);

            // Now all the objects have been created for building the graphs.
            // First, build a graph that performs Harris corner detection and initial pyramid computation.
            // See "VX/vx_nodes.h" for APIs how to add nodes into a graph.
            Node[] nodesHarris = new Node[] {
                VX.ColorConvertNode(graphHarris, _ImageInput, harris_yuv_image),
                VX.ChannelExtractNode(graphHarris, harris_yuv_image, Channel.ChannelY, harris_gray_image),
                VX.GaussianPyramidNode(graphHarris, harris_gray_image, _PyramidCurrent),
                VX.HarrisCornersNode(graphHarris, harris_gray_image, strength_thresh, min_distance, sensitivity, harris_gradient_size, harris_block_size, _KeypointsCurrent, Reference.Null)
            };
            // Node handles can be released immediately: the graph retains its own
            // reference to each node (OpenVX reference counting).
            VX.Release(nodesHarris);

            VX.VerifyGraph(graphHarris);

            // Now, build a graph that computes image pyramid for the next frame,
            // and tracks features using optical flow.
            Node[] nodesTrack = new Node[] {
                VX.ColorConvertNode(graphTrack, _ImageInput, opticalflow_yuv_image),
                VX.ChannelExtractNode(graphTrack, opticalflow_yuv_image, Channel.ChannelY, opticalflow_gray_image),
                VX.GaussianPyramidNode(graphTrack, opticalflow_gray_image, _PyramidCurrent),
                VX.OpticalFlowPyrLKNode(graphTrack, _PyramidPrevious, _PyramidCurrent, _KeypointsPrevious, _KeypointsPrevious, _KeypointsCurrent,
                                        lk_termination, epsilon, num_iterations,
                                        use_initial_estimate, (UIntPtr)lk_window_dimension
                                        )
            };
            VX.Release(nodesTrack);

            VX.VerifyGraph(graphTrack);

            _GraphHarris = graphHarris;
            _GraphTrack  = graphTrack;

            #endregion
        }
Example #2
        /// <summary>
        /// Per-frame render handler. Draws an animated quad into an offscreen
        /// framebuffer, copies that frame into the OpenVX input image, runs
        /// either the Harris graph (first frame only, to seed keypoints) or the
        /// LK tracking graph, uploads tracked keypoint pairs to a GPU vertex
        /// buffer, draws the frame plus the tracking markers to the visible
        /// control, and finally ages the OpenVX delay objects so the current
        /// pyramid/keypoints become the previous ones for the next frame.
        /// </summary>
        /// <param name="sender">Standard event source (unused).</param>
        /// <param name="e">OpenGL control event arguments (unused).</param>
        private void VisionControl_Render(object sender, OpenGL.GlControlEventArgs e)
        {
            #region Draw Basic Picture

            // Update image input: render the scene offscreen so its pixels can
            // be read back and handed to OpenVX below.
            _Framebuffer.BindDraw(_GraphicsContext);
            Gl.Viewport(0, 0, (int)_Framebuffer.Width, (int)_Framebuffer.Height);
            _Framebuffer.Clear(_GraphicsContext, ClearBufferMask.ColorBufferBit);
            {                   // Draw a quad
                Matrix4x4f quadProj  = Matrix4x4f.Ortho2D(-1.0f, +1.0f, -1.0f, +1.0f);
                Matrix4x4f quadModel = new Matrix4x4f();

                // Advance the animation: the quad oscillates around Z as _Angle grows.
                _Angle += 1.0f;

                quadModel.RotateZ(10.0f * (float)Math.Cos(Angle.ToRadians(_Angle)));

                _GraphicsContext.Bind(_ProgramStd);
                _ProgramStd.SetUniform(_GraphicsContext, "glo_ModelViewProjection", quadProj * quadModel);
                _ProgramStd.SetUniform(_GraphicsContext, "glo_UniformColor", Vertex4f.One);

                _ArraysQuad.Draw(_GraphicsContext, _ProgramStd);
            }
            _Framebuffer.UnbindDraw(_GraphicsContext);

            #endregion

            #region Track Corners

            // Read back image input pixels
            using (OpenGL.Objects.Image imageInput = _FramebufferTexture.Get(_GraphicsContext, PixelLayout.RGB24, 0)) {
                // Copy the input RGB frame from OpenGL to OpenVX, describing the
                // whole image as the region to copy.
                Rectangle cv_rgb_image_region = new Rectangle();
                cv_rgb_image_region.StartX = 0;
                cv_rgb_image_region.StartY = 0;
                cv_rgb_image_region.EndX   = imageInput.Width;
                cv_rgb_image_region.EndY   = imageInput.Height;

                ImagePatchAddressing cv_rgb_image_layout = new ImagePatchAddressing();
                cv_rgb_image_layout.StrideX = 3;                        // 3 bytes per pixel (RGB24 readback above)
                cv_rgb_image_layout.StrideY = (int)imageInput.Stride;   // row pitch as reported by the GL image

                VX.CopyImagePatch(_ImageInput, ref cv_rgb_image_region, 0, ref cv_rgb_image_layout, imageInput.ImageBuffer, Accessor.WriteOnly, MemoryType.Host);
            }

            // Now that input RGB image is ready, just run a graph.
            // Run Harris at the beginning to initialize the previous keypoints,
            // on other frames run the tracking graph.
            VX.ProcessGraph(_DetectCorners ? _GraphHarris : _GraphTrack);

            _DetectCorners = false; // after the first frame only the tracking graph runs

            #endregion

            #region Store Markers on GPU

            // To mark the keypoints in display, you need to access the output
            // keypoint array and draw each item on the output window using gui.DrawArrow().
            UIntPtr num_corners  = UIntPtr.Zero;
            uint    num_tracking = 0;   // count of successfully tracked keypoints; also used by "Draw Markers" below

            // Re-fetch the delay slot references: after AgeDelay at the end of the
            // previous frame the slot contents have rotated.
            _KeypointsPrevious = VX.GetReferenceFromDelay(_KeypointsDelay, -1);
            _KeypointsCurrent  = VX.GetReferenceFromDelay(_KeypointsDelay, 0);

            VX.Query(_KeypointsPrevious, ArrayAttribute.Numitems, out num_corners);
            if (num_corners.ToUInt64() > 0)
            {
                UIntPtr kp_old_stride = UIntPtr.Zero, kp_new_stride = UIntPtr.Zero;
                MapId   kp_old_map = new MapId(), kp_new_map = new MapId();
                IntPtr  kp_old_buf, kp_new_buf;

                // Map both keypoint arrays for host-side reading.
                // NOTE(review): both ranges use num_corners queried from the
                // *previous* array; this assumes the current array holds at least
                // as many items — confirm against the OpenVX implementation.
                VX.MapArrayRange(_KeypointsPrevious, (UIntPtr)0, num_corners, ref kp_old_map, ref kp_old_stride, out kp_old_buf, Accessor.ReadOnly, MemoryType.Host, 0);
                VX.MapArrayRange(_KeypointsCurrent, (UIntPtr)0, num_corners, ref kp_new_map, ref kp_new_stride, out kp_new_buf, Accessor.ReadOnly, MemoryType.Host, 0);

                _BufferOpticalMarkers.Map(_GraphicsContext, BufferAccess.WriteOnly);

                // Store each tracked (old, new) keypoint pair as a line segment
                // (two consecutive vertices) in the marker vertex buffer.
                for (uint i = 0; i < num_corners.ToUInt64(); i++)
                {
                    KeyPoint kp_old = VX.ArrayItem <KeyPoint>(kp_old_buf, i, kp_old_stride);
                    KeyPoint kp_new = VX.ArrayItem <KeyPoint>(kp_new_buf, i, kp_new_stride);

                    if (kp_new.TrackingStatus != 0)
                    {
                        // NOTE(review): coordinates are normalized by a hard-coded
                        // 1024 — presumably the OpenVX input image size; confirm it
                        // stays in sync with the initialization code.
                        Vertex2f vOld = new Vertex2f(kp_old.X / 1024.0f, kp_old.Y / 1024.0f);
                        Vertex2f vNew = new Vertex2f(kp_new.X / 1024.0f, kp_new.Y / 1024.0f);

                        _BufferOpticalMarkers.SetElement(vOld, (num_tracking * 2) + 0, 0);
                        _BufferOpticalMarkers.SetElement(vNew, (num_tracking * 2) + 1, 0);

                        num_tracking++;
                    }
                }

                _BufferOpticalMarkers.Unmap(_GraphicsContext);

                VX.UnmapArrayRange(_KeypointsPrevious, kp_old_map);
                VX.UnmapArrayRange(_KeypointsCurrent, kp_new_map);
            }

            #endregion

            // Switch to the visible control's framebuffer for final presentation.
            Gl.Viewport(0, 0, VisionControl.Width, VisionControl.Height);
            Gl.ClearColor(1.0f, 0.0f, 0.0f, 0.0f);
            Gl.Clear(ClearBufferMask.ColorBufferBit);

            #region Draw Input Image

            // Blit the offscreen frame to the control via a textured full-screen quad.
            _GraphicsContext.Bind(_ProgramStdTex);
            _ProgramStdTex.SetUniform(_GraphicsContext, "glo_ModelViewProjection", Matrix4x4f.Ortho2D(0.0f, 1.0f, 0.0f, 1.0f));
            _ProgramStdTex.SetUniform(_GraphicsContext, "glo_Texture", _FramebufferTexture);

            _ArraysPostQuad.Draw(_GraphicsContext, _ProgramStdTex);

            #endregion

            #region Draw Markers

            if (num_tracking > 0)
            {
                // Draw the tracked keypoint pairs (two vertices per marker) in red.
                _GraphicsContext.Bind(_ProgramStd);
                _ProgramStd.SetUniform(_GraphicsContext, "glo_ModelViewProjection", Matrix4x4f.Ortho2D(0.0f, 1.0f, 0.0f, 1.0f));
                _ProgramStd.SetUniform(_GraphicsContext, "glo_UniformColor", new Vertex4f(1.0f, 0.0f, 0.0f, 1.0f));

                _ArraysOpticalMarkers.Draw(_GraphicsContext, _ProgramStd, 0, 0, num_tracking * 2);
            }

            #endregion

            // Increase the age of the delay objects to make the current entry become previous entry
            VX.AgeDelay(_PyramidDelay);
            VX.AgeDelay(_KeypointsDelay);
        }