Example #1
        public virtual FrameworkReturnCode transform(Point3DfArray inputPoints, Transform3Df transformation, Point3DfArray outputPoints)
        {
            FrameworkReturnCode ret = (FrameworkReturnCode)solar_api_geomPINVOKE.I3DTransform_transform(swigCPtr, Point3DfArray.getCPtr(inputPoints), Transform3Df.getCPtr(transformation), Point3DfArray.getCPtr(outputPoints));

            if (solar_api_geomPINVOKE.SWIGPendingException.Pending)
            {
                throw solar_api_geomPINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
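A minimal usage sketch for the wrapper above, under stated assumptions: `transformer` is an I3DTransform instance obtained elsewhere (e.g. via Create<I3DTransform>(...) as in Example #9), `pose` is an existing Transform3Df, and the Point3Df(x, y, z) constructor is assumed by analogy with Point2Df.

        // Hypothetical sketch: `transformer` (I3DTransform) and `pose` (Transform3Df) come from
        // the calling code; the Point3Df(x, y, z) constructor is an assumption here.
        Point3DfArray inputPoints  = new Point3DfArray();
        Point3DfArray outputPoints = new Point3DfArray();
        inputPoints.Add(new Point3Df(0.0f, 0.0f, 0.0f));

        // Native errors surface both as a FrameworkReturnCode and, for native exceptions,
        // as a rethrown SWIGPendingException (see the wrapper above).
        if (transformer.transform(inputPoints, pose, outputPoints) != FrameworkReturnCode._SUCCESS)
        {
            // handle the failure
        }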
Example #2
        public virtual FrameworkReturnCode getWorldCorners(Point3DfArray worldCorners)
        {
            FrameworkReturnCode ret = (FrameworkReturnCode)solar_api_input_filesPINVOKE.IMarker2DSquared_getWorldCorners(swigCPtr, Point3DfArray.getCPtr(worldCorners));

            if (solar_api_input_filesPINVOKE.SWIGPendingException.Pending)
            {
                throw solar_api_input_filesPINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
Example #3
        public virtual FrameworkReturnCode map(Point2DfArray digitalPoints, Point3DfArray worldPoints)
        {
            FrameworkReturnCode ret = (FrameworkReturnCode)solar_api_geomPINVOKE.IImage2WorldMapper_map(swigCPtr, Point2DfArray.getCPtr(digitalPoints), Point3DfArray.getCPtr(worldPoints));

            if (solar_api_geomPINVOKE.SWIGPendingException.Pending)
            {
                throw solar_api_geomPINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
Example #4
        public virtual FrameworkReturnCode unproject(Point2DfArray imagePoints, Point3DfArray worldPoints, Transform3Df pose)
        {
            FrameworkReturnCode ret = (FrameworkReturnCode)solar_api_geomPINVOKE.IUnproject_unproject__SWIG_0(swigCPtr, Point2DfArray.getCPtr(imagePoints), Point3DfArray.getCPtr(worldPoints), Transform3Df.getCPtr(pose));

            if (solar_api_geomPINVOKE.SWIGPendingException.Pending)
            {
                throw solar_api_geomPINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
Example #5
        public virtual FrameworkReturnCode unproject(KeypointArray imageKeypoints, Point3DfArray worldPoints)
        {
            FrameworkReturnCode ret = (FrameworkReturnCode)solar_api_geomPINVOKE.IUnproject_unproject__SWIG_3(swigCPtr, KeypointArray.getCPtr(imageKeypoints), Point3DfArray.getCPtr(worldPoints));

            if (solar_api_geomPINVOKE.SWIGPendingException.Pending)
            {
                throw solar_api_geomPINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
Example #6
        public virtual FrameworkReturnCode project(Point3DfArray inputPoints, Point2DfArray imagePoints)
        {
            FrameworkReturnCode ret = (FrameworkReturnCode)solar_api_geomPINVOKE.IProject_project__SWIG_1(swigCPtr, Point3DfArray.getCPtr(inputPoints), Point2DfArray.getCPtr(imagePoints));

            if (solar_api_geomPINVOKE.SWIGPendingException.Pending)
            {
                throw solar_api_geomPINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
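Examples #4 and #6 are two halves of a round trip between world space and image space. A hedged sketch, assuming `projection` (IProject) and `unprojection` (IUnproject) were created as in Example #9, their camera parameters were set as in Example #10, and `pose` and `markerWorldCorners` are already available:

        // Sketch only: `projection`, `unprojection`, `markerWorldCorners` and `pose` are assumed
        // to be set up as in Examples #9 and #10.
        Point2DfArray projectedCorners   = new Point2DfArray();
        Point3DfArray unprojectedCorners = new Point3DfArray();

        // 3D world points -> 2D image points (the overload taking an explicit pose, as used in Example #10)
        projection.project(markerWorldCorners, projectedCorners, pose);

        // 2D image points -> 3D world points on the marker plane (the overload shown in Example #4)
        unprojection.unproject(projectedCorners, unprojectedCorners, pose);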
Example #7
        public virtual FrameworkReturnCode estimate(Point2DfArray imagePoints, Point3DfArray worldPoints, Point2DfArray imagePoints_inlier, Point3DfArray worldPoints_inlier, Transform3Df pose)
        {
            FrameworkReturnCode ret = (FrameworkReturnCode)solar_api_solver_posePINVOKE.I3DTransformSACFinderFrom2D3D_estimate__SWIG_1(swigCPtr, Point2DfArray.getCPtr(imagePoints), Point3DfArray.getCPtr(worldPoints), Point2DfArray.getCPtr(imagePoints_inlier), Point3DfArray.getCPtr(worldPoints_inlier), Transform3Df.getCPtr(pose));

            if (solar_api_solver_posePINVOKE.SWIGPendingException.Pending)
            {
                throw solar_api_solver_posePINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
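Example #10 feeds this RANSAC-based estimator with matched 2D image points and 3D world points. A hedged sketch of that call pattern, assuming `poseEstimationPlanar` was created as in Example #9, its camera parameters were set as in Example #10, and `cam2Dpoints`, `ref3Dpoints` and `pose` are provided by the caller:

        // Sketch only: `cam2Dpoints`/`ref3Dpoints` are matched 2D-3D correspondences (e.g. produced
        // as in Example #10) and `pose` is a Transform3Df supplied by the calling code.
        Point2DfArray imagePointsInliers = new Point2DfArray();
        Point3DfArray worldPointsInliers = new Point3DfArray();

        if (poseEstimationPlanar.estimate(cam2Dpoints, ref3Dpoints,
                                          imagePointsInliers, worldPointsInliers, pose)
            == FrameworkReturnCode._SUCCESS)
        {
            // `pose` now holds the estimated camera pose; the inlier arrays contain the
            // 2D/3D correspondences retained by the RANSAC consensus.
        }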
Example #8
        public virtual FrameworkReturnCode find(Frame lastFrame, Frame currentFrame, DescriptorMatchVector current_matches, Map worldMap, Point3DfArray shared_3dpoint, Point2DfArray shared_2dpoint, DescriptorMatchVector found_matches, DescriptorMatchVector remaining_matches)
        {
            FrameworkReturnCode ret = (FrameworkReturnCode)solar_api_solver_posePINVOKE.I2D3DCorrespondencesFinder_find__SWIG_1(swigCPtr, Frame.getCPtr(lastFrame), Frame.getCPtr(currentFrame), DescriptorMatchVector.getCPtr(current_matches), Map.getCPtr(worldMap), Point3DfArray.getCPtr(shared_3dpoint), Point2DfArray.getCPtr(shared_2dpoint), DescriptorMatchVector.getCPtr(found_matches), DescriptorMatchVector.getCPtr(remaining_matches));

            if (solar_api_solver_posePINVOKE.SWIGPendingException.Pending)
            {
                throw solar_api_solver_posePINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
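A hedged sketch of calling the 2D-3D correspondences finder from a tracking loop. The Frame, Map and DescriptorMatchVector inputs are assumed to come from surrounding SLAM/mapping code that these examples do not show:

        // Sketch only: `corresp2D3DFinder`, `lastFrame`, `currentFrame`, `currentMatches`
        // and `worldMap` are assumed to exist in the calling code.
        Point3DfArray         shared3dPoints   = new Point3DfArray();
        Point2DfArray         shared2dPoints   = new Point2DfArray();
        DescriptorMatchVector foundMatches     = new DescriptorMatchVector();
        DescriptorMatchVector remainingMatches = new DescriptorMatchVector();

        if (corresp2D3DFinder.find(lastFrame, currentFrame, currentMatches, worldMap,
                                   shared3dPoints, shared2dPoints,
                                   foundMatches, remainingMatches) == FrameworkReturnCode._SUCCESS)
        {
            // shared2dPoints / shared3dPoints can now feed a 2D-3D pose estimator
            // such as the one in Example #7.
        }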
Example #9
        public NaturalPipeline(IComponentManager xpcfComponentManager) : base(xpcfComponentManager)
        {
            imageViewerKeypoints = Create <IImageViewer>("SolARImageViewerOpencv", "keypoints");
            imageViewerResult    = Create <IImageViewer>("SolARImageViewerOpencv");
            marker               = Create <IMarker2DNaturalImage>("SolARMarker2DNaturalImageOpencv");
            kpDetector           = Create <IKeypointDetector>("SolARKeypointDetectorOpencv");
            kpDetectorRegion     = Create <IKeypointDetectorRegion>("SolARKeypointDetectorRegionOpencv");
            descriptorExtractor  = Create <IDescriptorsExtractor>("SolARDescriptorsExtractorAKAZE2Opencv");
            matcher              = Create <IDescriptorMatcher>("SolARDescriptorMatcherKNNOpencv");
            geomMatchesFilter    = Create <IMatchesFilter>("SolARGeometricMatchesFilterOpencv");
            poseEstimationPlanar = Create <I3DTransformSACFinderFrom2D3D>("SolARPoseEstimationPlanarPointsOpencv");
            opticalFlow          = Create <IOpticalFlowEstimator>("SolAROpticalFlowPyrLKOpencv");
            projection           = Create <IProject>("SolARProjectOpencv");
            unprojection         = Create <IUnproject>("SolARUnprojectPlanarPointsOpencv");
            img_mapper           = Create <IImage2WorldMapper>("SolARImage2WorldMapper4Marker2D");
            basicMatchesFilter   = Create <IMatchesFilter>("SolARBasicMatchesFilter");
            keypointsReindexer   = Create <IKeypointsReIndexer>("SolARKeypointsReIndexer");
            overlay3DComponent   = Create <I3DOverlay>("SolAR3DOverlayOpencv");
            /* In dynamic mode, check that all components have been successfully created */
            if (new object[] { imageViewerKeypoints, imageViewerResult, marker, kpDetector, kpDetectorRegion, descriptorExtractor, matcher,
                               geomMatchesFilter, poseEstimationPlanar, opticalFlow, projection, unprojection, img_mapper,
                               basicMatchesFilter, keypointsReindexer, overlay3DComponent }.Contains(null))
            {
                LOG_ERROR("One or more component creations have failed");
                return;
            }
            LOG_INFO("All components have been created");

            // Declare data structures used to exchange information between components
            refImage         = SharedPtr.Alloc <Image>().AddTo(subscriptions);
            previousCamImage = SharedPtr.Alloc <Image>().AddTo(subscriptions);

            //kpImageCam = SharedPtr.Alloc<Image>().AddTo(subscriptions);
            refDescriptors = SharedPtr.Alloc <DescriptorBuffer>().AddTo(subscriptions);
            camDescriptors = SharedPtr.Alloc <DescriptorBuffer>().AddTo(subscriptions);
            matches        = new DescriptorMatchVector().AddTo(subscriptions);

            // where to store detected keypoints in ref image and camera image
            refKeypoints = new KeypointArray().AddTo(subscriptions);
            camKeypoints = new KeypointArray().AddTo(subscriptions);

            markerWorldCorners = new Point3DfArray().AddTo(subscriptions);

            // load marker
            marker.loadMarker().Check();
            marker.getWorldCorners(markerWorldCorners).Check();
            marker.getImage(refImage).Check();

            // detect keypoints in reference image
            kpDetector.detect(refImage, refKeypoints);

            // extract descriptors in reference image
            descriptorExtractor.extract(refImage, refKeypoints, refDescriptors);

            // initialize image mapper with the reference image size and marker size
            var img_mapper_config = img_mapper.BindTo <IConfigurable>().AddTo(subscriptions);
            var refSize           = refImage.getSize();
            var mkSize            = marker.getSize();

            img_mapper_config.getProperty("digitalWidth").setIntegerValue((int)refSize.width);
            img_mapper_config.getProperty("digitalHeight").setIntegerValue((int)refSize.height);
            img_mapper_config.getProperty("worldWidth").setFloatingValue(mkSize.width);
            img_mapper_config.getProperty("worldHeight").setFloatingValue(mkSize.height);

            // vector of 4 corners in the marker
            refImgCorners = new Point2DfArray();
            float    w = refImage.getWidth(), h = refImage.getHeight();
            Point2Df corner0 = new Point2Df(0, 0);
            Point2Df corner1 = new Point2Df(w, 0);
            Point2Df corner2 = new Point2Df(w, h);
            Point2Df corner3 = new Point2Df(0, h);

            refImgCorners.Add(corner0);
            refImgCorners.Add(corner1);
            refImgCorners.Add(corner2);
            refImgCorners.Add(corner3);
        }
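A hedged sketch of instantiating this pipeline. It assumes the host application has already obtained and configured an xpcf IComponentManager; the configuration-loading step depends on the xpcf runtime and is not shown in these examples.

        // Hypothetical: `componentManager` is an IComponentManager already loaded with the
        // SolAR component configuration by the host application.
        var pipeline = new NaturalPipeline(componentManager);
        // The constructor creates every component, loads the natural-image marker and
        // precomputes the reference keypoints/descriptors; camera frames are then processed
        // one by one through Proceed (Example #10).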
Example #10
        public override FrameworkReturnCode Proceed(Image camImage, Transform3Df pose, ICamera camera)
        {
            // initialize the overlay 3D component with the camera intrinsic parameters (please refer to the use of the intrinsic parameters file)
            overlay3DComponent.setCameraParameters(camera.getIntrinsicsParameters(), camera.getDistorsionParameters());

            // initialize pose estimation based on planar points with the camera intrinsic parameters (please refer to the use of the intrinsic parameters file)
            poseEstimationPlanar.setCameraParameters(camera.getIntrinsicsParameters(), camera.getDistorsionParameters());

            // initialize the projection component with the camera intrinsic parameters (please refer to the use of the intrinsic parameters file)
            projection.setCameraParameters(camera.getIntrinsicsParameters(), camera.getDistorsionParameters());

            // initialize the unprojection component with the camera intrinsic parameters (please refer to the use of the intrinsic parameters file)
            unprojection.setCameraParameters(camera.getIntrinsicsParameters(), camera.getDistorsionParameters());
            if (!isTrack)
            {
                kpDetector.detect(camImage, camKeypoints);
                descriptorExtractor.extract(camImage, camKeypoints, camDescriptors);
                matcher.match(refDescriptors, camDescriptors, matches);
                basicMatchesFilter.filter(matches, matches, refKeypoints, camKeypoints);
                geomMatchesFilter.filter(matches, matches, refKeypoints, camKeypoints);

                var ref2Dpoints = new Point2DfArray();
                var cam2Dpoints = new Point2DfArray();
                var ref3Dpoints = new Point3DfArray();

                if (matches.Count > 10)
                {
                    keypointsReindexer.reindex(refKeypoints, camKeypoints, matches, ref2Dpoints, cam2Dpoints).Check();
                    img_mapper.map(ref2Dpoints, ref3Dpoints).Check();
                    if (poseEstimationPlanar.estimate(cam2Dpoints, ref3Dpoints, imagePoints_inliers, worldPoints_inliers, pose) != FrameworkReturnCode._SUCCESS)
                    {
                        valid_pose = false;
                        //LOG_DEBUG("Wrong homography for this frame");
                    }
                    else
                    {
                        isTrack = true;
                        needNewTrackedPoints = true;
                        valid_pose           = true;
                        previousCamImage     = camImage.copy();
                        //LOG_INFO("Start tracking", pose.matrix());
                    }
                }
            }
            else
            {
                // initialize points to track
                if (needNewTrackedPoints)
                {
                    imagePoints_track.Clear();
                    worldPoints_track.Clear();
                    KeypointArray newKeypoints = new KeypointArray();
                    // Get the projection of the corners of the marker in the current image
                    projection.project(markerWorldCorners, projectedMarkerCorners, pose);

                    // Detect the keypoints within the contours of the marker defined by the projected corners
                    kpDetectorRegion.detect(previousCamImage, projectedMarkerCorners, newKeypoints);

                    if (newKeypoints.Count > updateTrackedPointThreshold)
                    {
                        foreach (var keypoint in newKeypoints)
                        {
                            //imagePoints_track.push_back(xpcf::utils::make_shared<Point2Df>(keypoint->getX(), keypoint->getY()));
                            imagePoints_track.Add(new Point2Df(keypoint.getX(), keypoint.getY()));
                        }
                        // get back the 3D positions of the detected keypoints in world space
                        unprojection.unproject(imagePoints_track, worldPoints_track, pose);
                        //LOG_DEBUG("Reinitialize points to track");
                    }
                    else
                    {
                        isTrack = false;
                        //LOG_DEBUG("Cannot reinitialize points to track");
                    }
                    needNewTrackedPoints = false;
                }

                // Tracking mode
                if (isTrack)
                {
                    Point2DfArray trackedPoints = new Point2DfArray();
                    Point2DfArray pts2D         = new Point2DfArray();
                    Point3DfArray pts3D         = new Point3DfArray();

                    UCharList status = new UCharList();
                    FloatList err    = new FloatList();

                    // tracking 2D-2D
                    opticalFlow.estimate(previousCamImage, camImage, imagePoints_track, trackedPoints, status, err);

                    for (int i = 0; i < status.Count; i++)
                    {
                        if (status[i] == 1)
                        {
                            pts2D.Add(trackedPoints[i]);
                            pts3D.Add(worldPoints_track[i]);
                        }
                    }
                    // Estimate the camera pose from the 2D-3D planar correspondences
                    if (poseEstimationPlanar.estimate(pts2D, pts3D, imagePoints_track, worldPoints_track, pose) != FrameworkReturnCode._SUCCESS)
                    {
                        isTrack              = false;
                        valid_pose           = false;
                        needNewTrackedPoints = false;
                        //LOG_INFO("Tracking lost");
                    }
                    else
                    {
                        valid_pose       = true;
                        previousCamImage = camImage.copy();
                        if (worldPoints_track.Count < updateTrackedPointThreshold)
                        {
                            needNewTrackedPoints = true;
                        }
                    }
                }
                //else
                //LOG_INFO("Tracking lost");
            }

            if (valid_pose)
            {
                // Draw a 3D box at the location of the recognized natural marker
                overlay3DComponent.draw(pose, camImage);
            }
            //if (imageViewerResult.display(camImage) == FrameworkReturnCode._STOP) return FrameworkReturnCode._STOP;
            return(FrameworkReturnCode._SUCCESS);
        }
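A hedged sketch of the per-frame loop that drives Proceed. It assumes `pipeline`, `camera` (ICamera), `camImage` (Image) and `pose` (Transform3Df) were set up by the host application; how the camera fills `camImage` on each iteration is not shown in these examples.

        while (true)
        {
            // ... acquire the next camera frame into camImage (camera API not shown here) ...
            if (pipeline.Proceed(camImage, pose, camera) != FrameworkReturnCode._SUCCESS)
            {
                break;
            }
            // When a valid pose was found, Proceed has already drawn the 3D overlay into
            // camImage; displaying or further processing of camImage would go here.
        }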
Example #11
        public FiducialPipeline(IComponentManager xpcfComponentManager) : base(xpcfComponentManager)
        {
            binaryMarker = Create <IMarker2DSquaredBinary>("SolARMarker2DSquaredBinaryOpencv");

#if !NDEBUG
            imageViewer                 = Create <IImageViewer>("SolARImageViewerOpencv");
            imageViewerGrey             = Create <IImageViewer>("SolARImageViewerOpencv", "grey");
            imageViewerBinary           = Create <IImageViewer>("SolARImageViewerOpencv", "binary");
            imageViewerContours         = Create <IImageViewer>("SolARImageViewerOpencv", "contours");
            imageViewerFilteredContours = Create <IImageViewer>("SolARImageViewerOpencv", "filteredContours");
#endif
            overlay3DComponent = Create <I3DOverlay>("SolAR3DOverlayOpencv");

            imageFilterBinary          = Create <IImageFilter>("SolARImageFilterBinaryOpencv");
            imageConvertor             = Create <IImageConvertor>("SolARImageConvertorOpencv");
            contoursExtractor          = Create <IContoursExtractor>("SolARContoursExtractorOpencv");
            contoursFilter             = Create <IContoursFilter>("SolARContoursFilterBinaryMarkerOpencv");
            perspectiveController      = Create <IPerspectiveController>("SolARPerspectiveControllerOpencv");
            patternDescriptorExtractor = Create <IDescriptorsExtractorSBPattern>("SolARDescriptorsExtractorSBPatternOpencv");

            patternMatcher   = Create <IDescriptorMatcher>("SolARDescriptorMatcherRadiusOpencv");
            patternReIndexer = Create <ISBPatternReIndexer>("SolARSBPatternReIndexer");

            img2worldMapper = Create <IImage2WorldMapper>("SolARImage2WorldMapper4Marker2D");
            PnP             = Create <I3DTransformFinderFrom2D3D>("SolARPoseEstimationPnpOpencv");
#if !NDEBUG
            overlay2DContours = Create <I2DOverlay>("SolAR2DOverlayOpencv", "contours");
            overlay2DCircles  = Create <I2DOverlay>("SolAR2DOverlayOpencv", "circles");
#endif

            greyImage   = SharedPtr.Alloc <Image>().AddTo(subscriptions);
            binaryImage = SharedPtr.Alloc <Image>().AddTo(subscriptions);

            contours                      = new Contour2DfArray().AddTo(subscriptions);
            filtered_contours             = new Contour2DfArray().AddTo(subscriptions);
            patches                       = new ImageList().AddTo(subscriptions);
            recognizedContours            = new Contour2DfArray().AddTo(subscriptions);
            recognizedPatternsDescriptors = new DescriptorBuffer().AddTo(subscriptions);
            markerPatternDescriptor       = new DescriptorBuffer().AddTo(subscriptions);
            patternMatches                = new DescriptorMatchVector().AddTo(subscriptions);
            pattern2DPoints               = new Point2DfArray().AddTo(subscriptions);
            img2DPoints                   = new Point2DfArray().AddTo(subscriptions);
            pattern3DPoints               = new Point3DfArray().AddTo(subscriptions);
            //CamCalibration K;

            // components initialisation
            binaryMarker.loadMarker().Check();
            patternDescriptorExtractor.extract(binaryMarker.getPattern(), markerPatternDescriptor).Check();
            var binaryMarkerSize = binaryMarker.getSize();

            var patternSize = binaryMarker.getPattern().getSize();

            patternDescriptorExtractor.BindTo <IConfigurable>().getProperty("patternSize").setIntegerValue(patternSize);
            patternReIndexer.BindTo <IConfigurable>().getProperty("sbPatternSize").setIntegerValue(patternSize);

            // NOT WORKING! Initialize the image mapper with the pattern size and the marker size
            var img2worldMapperConf = img2worldMapper.BindTo <IConfigurable>();
            img2worldMapperConf.getProperty("digitalWidth").setIntegerValue(patternSize);
            img2worldMapperConf.getProperty("digitalHeight").setIntegerValue(patternSize);
            img2worldMapperConf.getProperty("worldWidth").setFloatingValue(binaryMarkerSize.width);
            img2worldMapperConf.getProperty("worldHeight").setFloatingValue(binaryMarkerSize.height);
        }