Example #1
        static Pose CallPluginAtEndOfFrames(Transform3Df pose)
        {
            Matrix4x4 cameraPoseFromSolAR = Matrix4x4.identity;

            var pos = pose.translation();
            var rot = pose.rotation();

            //cameraPoseFromSolAR.SetRow(0, new Vector4(rot.coeff(0, 0), rot.coeff(0, 1), rot.coeff(0, 2), pos.coeff(0, 0)));
            //cameraPoseFromSolAR.SetRow(1, new Vector4(rot.coeff(1, 0), rot.coeff(1, 1), rot.coeff(1, 2), pos.coeff(0, 1)));
            //cameraPoseFromSolAR.SetRow(2, new Vector4(rot.coeff(2, 0), rot.coeff(2, 1), rot.coeff(2, 2), pos.coeff(0, 2)));
            //cameraPoseFromSolAR.SetRow(3, new Vector4(0, 0, 0, 1));
            for (int r = 0; r < 3; ++r)
            {
                for (int c = 0; c < 3; ++c)
                {
                    cameraPoseFromSolAR[r, c] = rot.coeff(r, c);
                }
                cameraPoseFromSolAR[r, 3] = pos.coeff(0, r);
            }

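            // invertMatrix is assumed to be the SolAR-to-Unity axis-flip matrix (identity with the Y row negated),
            // defined elsewhere in the class; Example #15 builds the same matrix explicitly.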
            Matrix4x4 unityCameraPose = invertMatrix * cameraPoseFromSolAR;

            Vector3 forward = new Vector3(unityCameraPose.m02, unityCameraPose.m12, unityCameraPose.m22);
            Vector3 up      = new Vector3(unityCameraPose.m01, unityCameraPose.m11, unityCameraPose.m21);

            var q = Quaternion.LookRotation(forward, -up);
            var p = new Vector3(unityCameraPose.m03, unityCameraPose.m13, unityCameraPose.m23);

            return(new Pose(p, q));
        }
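A minimal usage sketch (assumptions: the method above belongs to a MonoBehaviour, the Transform3Df comes from the SolAR pipeline, and Camera.main is the camera being driven; all names here are illustrative):

        void ApplyPose(Transform3Df solarPose)
        {
            // Convert the SolAR pose and drive the Unity camera with it.
            Pose unityPose = CallPluginAtEndOfFrames(solarPose);
            Camera.main.transform.SetPositionAndRotation(unityPose.position, unityPose.rotation);
        }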
Example #2
 public virtual void set(Transform3Df pose, Image image)
 {
     solar_api_sinkPINVOKE.ISinkPoseImage_set__SWIG_0(swigCPtr, Transform3Df.getCPtr(pose), Image.getCPtr(image));
     if (solar_api_sinkPINVOKE.SWIGPendingException.Pending)
     {
         throw solar_api_sinkPINVOKE.SWIGPendingException.Retrieve();
     }
 }
Example #3
 public virtual void draw(Transform3Df pose, Image displayImage)
 {
     solar_api_displayPINVOKE.I3DOverlay_draw(swigCPtr, Transform3Df.getCPtr(pose), Image.getCPtr(displayImage));
     if (solar_api_displayPINVOKE.SWIGPendingException.Pending)
     {
         throw solar_api_displayPINVOKE.SWIGPendingException.Retrieve();
     }
 }
Example #4
 public virtual void filter(Transform3Df pose1, Transform3Df pose2, CloudPointVector input, CloudPointVector output, IntVector index)
 {
     solar_api_solver_mapPINVOKE.IMapFilter_filter__SWIG_1(swigCPtr, Transform3Df.getCPtr(pose1), Transform3Df.getCPtr(pose2), CloudPointVector.getCPtr(input), CloudPointVector.getCPtr(output), IntVector.getCPtr(index));
     if (solar_api_solver_mapPINVOKE.SWIGPendingException.Pending)
     {
         throw solar_api_solver_mapPINVOKE.SWIGPendingException.Retrieve();
     }
 }
Example #5
        public virtual SinkReturnCode tryGet(Transform3Df pose)
        {
            SinkReturnCode ret = (SinkReturnCode)solar_api_sinkPINVOKE.ISinkPoseImage_tryGet(swigCPtr, Transform3Df.getCPtr(pose));

            if (solar_api_sinkPINVOKE.SWIGPendingException.Pending)
            {
                throw solar_api_sinkPINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
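A possible polling pattern for this sink (a sketch only: 'sink' is a hypothetical ISinkPoseImage instance, and the SinkReturnCode member name used below is assumed and should be checked against the generated enum):

            Transform3Df pose = new Transform3Df();
            // SinkReturnCode._NEW_POSE is assumed to signal that a fresh pose was written into 'pose'.
            if (sink.tryGet(pose) == SinkReturnCode._NEW_POSE)
            {
                // use the pose, e.g. convert it to a Unity Pose as in Example #1
            }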
Example #6
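        // Note: the method name 'udpate' (transposed letters) mirrors the identifier exported by the native library
        // and must match the PINVOKE entry point SolARPluginPipelineManager_udpate called below.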
        public PIPELINEMANAGER_RETURNCODE udpate(Transform3Df pose)
        {
            PIPELINEMANAGER_RETURNCODE ret = (PIPELINEMANAGER_RETURNCODE)SolARPipelineManagerPINVOKE.SolARPluginPipelineManager_udpate(swigCPtr, Transform3Df.getCPtr(pose));

            if (SolARPipelineManagerPINVOKE.SWIGPendingException.Pending)
            {
                throw SolARPipelineManagerPINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
Example #7
        public virtual SinkReturnCode udpate(Transform3Df pose)
        {
            SinkReturnCode ret = (SinkReturnCode)solar_api_sinkPINVOKE.ISinkPoseTextureBuffer_udpate(swigCPtr, Transform3Df.getCPtr(pose));

            if (solar_api_sinkPINVOKE.SWIGPendingException.Pending)
            {
                throw solar_api_sinkPINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
Example #8
        public virtual FrameworkReturnCode process(Transform3Df outputData)
        {
            FrameworkReturnCode ret = (FrameworkReturnCode)solar_api_fusionPINVOKE.IVisualInertialFusion_process(swigCPtr, Transform3Df.getCPtr(outputData));

            if (solar_api_fusionPINVOKE.SWIGPendingException.Pending)
            {
                throw solar_api_fusionPINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
Example #9
        public virtual FrameworkReturnCode transform(Point3DfArray inputPoints, Transform3Df transformation, Point3DfArray outputPoints)
        {
            FrameworkReturnCode ret = (FrameworkReturnCode)solar_api_geomPINVOKE.I3DTransform_transform(swigCPtr, Point3DfArray.getCPtr(inputPoints), Transform3Df.getCPtr(transformation), Point3DfArray.getCPtr(outputPoints));

            if (solar_api_geomPINVOKE.SWIGPendingException.Pending)
            {
                throw solar_api_geomPINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
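An illustrative call (a sketch under assumptions: 'transform3D' stands for an I3DTransform component created elsewhere, and a default-constructed Transform3Df is used as the transformation, assumed to be the identity):

            Point3DfArray inputPoints = new Point3DfArray();
            inputPoints.Add(new Point3Df(0.0f, 0.0f, 1.0f));
            Point3DfArray outputPoints = new Point3DfArray();
            Transform3Df transformation = new Transform3Df();   // assumed identity
            if (transform3D.transform(inputPoints, transformation, outputPoints) == FrameworkReturnCode._SUCCESS)
            {
                // outputPoints now holds the transformed copies of inputPoints
            }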
Example #10
        public virtual FrameworkReturnCode display(CloudPointVector points, Transform3Df pose)
        {
            FrameworkReturnCode ret = (FrameworkReturnCode)solar_api_displayPINVOKE.I3DPointsViewer_display__SWIG_4(swigCPtr, CloudPointVector.getCPtr(points), Transform3Df.getCPtr(pose));

            if (solar_api_displayPINVOKE.SWIGPendingException.Pending)
            {
                throw solar_api_displayPINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
Example #11
        public virtual SinkReturnCode update(Transform3Df pose)
        {
            SinkReturnCode ret = (SinkReturnCode)solar_api_pipelinePINVOKE.IPipeline_update(swigCPtr, Transform3Df.getCPtr(pose));

            if (solar_api_pipelinePINVOKE.SWIGPendingException.Pending)
            {
                throw solar_api_pipelinePINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
Example #12
        public virtual FrameworkReturnCode relocalize(Frame frame, Transform3Df pose)
        {
            FrameworkReturnCode ret = (FrameworkReturnCode)solar_api_relocPINVOKE.IRelocalizer_relocalize(swigCPtr, Frame.getCPtr(frame), Transform3Df.getCPtr(pose));

            if (solar_api_relocPINVOKE.SWIGPendingException.Pending)
            {
                throw solar_api_relocPINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
Example #13
        public static Pose ToUnity(this Transform3Df pose)
        {
            var inv = new Matrix4x4();

            inv.SetRow(0, new Vector4(+1, +0, +0, +0));
            inv.SetRow(1, new Vector4(+0, -1, +0, +0));
            inv.SetRow(2, new Vector4(+0, +0, +1, +0));
            inv.SetRow(3, new Vector4(+0, +0, +0, +1));

            var rot   = pose.rotation();
            var trans = pose.translation();
            var m     = new Matrix4x4();

            for (int r = 0; r < 3; ++r)
            {
                for (int c = 0; c < 3; ++c)
                {
                    m[r, c] = rot.coeff(r, c);
                }
                m[r, 3] = trans.coeff(r, 0);
            }
            m.SetRow(3, new Vector4(0, 0, 0, 1));

            m = m.inverse;

            m = inv * m;

            //m = m.inverse;

            var v = m.GetColumn(3);
            var q = Quaternion.LookRotation(m.GetColumn(2), m.GetColumn(1));

            q = Quaternion.Inverse(q);
            v = q * -v;

            return(new Pose(v, q));
        }
Example #14
        public virtual double triangulate(KeypointArray keypointsView1, KeypointArray keypointsView2, DescriptorBuffer descriptor1, DescriptorBuffer descriptor2, DescriptorMatchVector matches, PairUIntUInt working_views, Transform3Df poseView1, Transform3Df poseView2, CloudPointVector pcloud)
        {
            double ret = solar_api_solver_mapPINVOKE.ITriangulator_triangulate__SWIG_2(swigCPtr, KeypointArray.getCPtr(keypointsView1), KeypointArray.getCPtr(keypointsView2), DescriptorBuffer.getCPtr(descriptor1), DescriptorBuffer.getCPtr(descriptor2), DescriptorMatchVector.getCPtr(matches), PairUIntUInt.getCPtr(working_views), Transform3Df.getCPtr(poseView1), Transform3Df.getCPtr(poseView2), CloudPointVector.getCPtr(pcloud));

            if (solar_api_solver_mapPINVOKE.SWIGPendingException.Pending)
            {
                throw solar_api_solver_mapPINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
Example #15
        void Update()
        {
            if (isScanning && UpdateReady)
            {
                if (m_pipelineManager != null)
                {
                    if (m_Unity_Webcam)
                    {
                        GetPhysicalCameraFrame();
                    }
                    Transform3Df pose = new Transform3Df();

                    var _returnCode = m_pipelineManager.udpate(pose);

                    if (_returnCode == PIPELINEMANAGER_RETURNCODE._NEW_POSE || _returnCode == PIPELINEMANAGER_RETURNCODE._NEW_POSE_AND_IMAGE)
                    {
                        if (!isMarkerFound)
                        {
                            isMarkerFound = true;
                            findDeviceCanvas.SetActive(false);
                            cardboardLoader.SetActive(true);
                            deviceCameraScript.gameObject.SetActive(false);
                            phoneManagerScript.ChangeVisibility(true);
                        }

                        Matrix4x4 cameraPoseFromSolAR = new Matrix4x4();

                        cameraPoseFromSolAR.SetRow(0, new Vector4(pose.rotation().coeff(0, 0), pose.rotation().coeff(0, 1), pose.rotation().coeff(0, 2), pose.translation().coeff(0, 0)));
                        cameraPoseFromSolAR.SetRow(1, new Vector4(pose.rotation().coeff(1, 0), pose.rotation().coeff(1, 1), pose.rotation().coeff(1, 2), pose.translation().coeff(1, 0)));
                        cameraPoseFromSolAR.SetRow(2, new Vector4(pose.rotation().coeff(2, 0), pose.rotation().coeff(2, 1), pose.rotation().coeff(2, 2), pose.translation().coeff(2, 0)));
                        cameraPoseFromSolAR.SetRow(3, new Vector4(0, 0, 0, 1));

                        Matrix4x4 invertMatrix = new Matrix4x4();
                        invertMatrix.SetRow(0, new Vector4(1, 0, 0, 0));
                        invertMatrix.SetRow(1, new Vector4(0, -1, 0, 0));
                        invertMatrix.SetRow(2, new Vector4(0, 0, 1, 0));
                        invertMatrix.SetRow(3, new Vector4(0, 0, 0, 1));
                        Matrix4x4 unityCameraPose = invertMatrix * cameraPoseFromSolAR;

                        Vector3 forward  = new Vector3(unityCameraPose.m02, unityCameraPose.m12, unityCameraPose.m22);
                        Vector3 up       = new Vector3(unityCameraPose.m01, unityCameraPose.m11, unityCameraPose.m21);
                        var     rotation = Quaternion.LookRotation(forward, -up);
                        var     position = -new Vector3(unityCameraPose.m03, unityCameraPose.m13, unityCameraPose.m23);

                        m_PrevSolARObj.localPosition    = position;
                        m_RotationControl.localRotation = rotation;
                        var prevParent = m_PrevSolARObj.parent;
                        m_PrevSolARObj.parent           = m_RotationControl;
                        m_RotationControl.localRotation = Quaternion.identity;
                        m_PrevSolARObj.parent           = prevParent;


                        m_PrevSolARObj.localRotation = Quaternion.Inverse(rotation);
                        m_PrevSolARObj.Rotate(new Vector3(-90, 0, 0), Space.Self);

                        if (Screen.orientation == ScreenOrientation.LandscapeRight)
                        {
                            m_PrevSolARObj.localPosition -= new Vector3(cameraXShift, 0, 0);
                        }
                        else
                        {
                            m_PrevSolARObj.localPosition += new Vector3(cameraXShift, 0, 0);
                        }

                        var rotDist = Quaternion.Angle(prevRot, m_PrevSolARObj.rotation);
                        if (rotDist >= 10)
                        {
                            prevRot = m_PrevSolARObj.rotation;
                            socketSignalerScript.ResetGyroInverse();
                        }

                        var   posDist        = Vector3.Distance(m_SolARObj.position, m_PrevSolARObj.position);
                        float distLerpStatus = (Time.time - distLerpTime) * lerpSpeed;
                        ////var angleDifferences = GetAngleDifferences(m_SolARObj.rotation, m_PrevSolARObj.rotation);
                        if (posDist >= 0.008 && distLerpStatus >= 1)
                        {
                            //Debug.Log("Pos dist: " + posDist);
                            //m_SolARObj.position = m_PrevSolARObj.position;
                            distLerpTime = Time.time;
                            curPos       = m_SolARObj.position;
                            prevPos      = m_PrevSolARObj.position;
                        }

                        distLerpStatus      = (Time.time - distLerpTime) * lerpSpeed;
                        m_SolARObj.position = Vector3.Lerp(curPos, prevPos, distLerpStatus);
                    }
                }
            }

            SetFusedRotation();
        }
Example #16
 public virtual void filter(DescriptorMatchVector inputMatches, DescriptorMatchVector outputMatches, KeypointArray inputKeyPoints1, KeypointArray inputKeyPoints2, Transform3Df pose1, Transform3Df pose2, Matrix3x3f intrinsicParams)
 {
     solar_api_featuresPINVOKE.IMatchesFilter_filter__SWIG_1(swigCPtr, DescriptorMatchVector.getCPtr(inputMatches), DescriptorMatchVector.getCPtr(outputMatches), KeypointArray.getCPtr(inputKeyPoints1), KeypointArray.getCPtr(inputKeyPoints2), Transform3Df.getCPtr(pose1), Transform3Df.getCPtr(pose2), Matrix3x3f.getCPtr(intrinsicParams));
     if (solar_api_featuresPINVOKE.SWIGPendingException.Pending)
     {
         throw solar_api_featuresPINVOKE.SWIGPendingException.Retrieve();
     }
 }
Example #17
        public override FrameworkReturnCode Proceed(Image camImage, Transform3Df pose, ICamera camera)
        {
            //            Action<IntVector, double> localBundleAdjuster = (IntVector framesIdxToBundle, double reprojError) =>
            //            {
            //                Transform3DfList correctedPoses = new Transform3DfList();
            //                CloudPointVector correctedCloud = new CloudPointVector();
            //                CameraParameters correctedCamera = new CameraParameters();
            //                reprojError = bundler.solve(mapper.getKeyframes(),
            //                                             mapper.getGlobalMap().getPointCloud(),
            //                                             camera.getIntrinsicsParameters(),
            //                                             camera.getDistorsionParameters(),
            //                                             framesIdxToBundle,
            //                                             correctedPoses,
            //                                             correctedCloud,
            //                                             correctedCamera.intrinsic,
            //                                             correctedCamera.distorsion);
            //                mapper.update(correctedPoses, correctedCloud);
            //            };
            //            binaryMarker.loadMarker().Check();
            //            patternDescriptorExtractor.extract(binaryMarker.getPattern(), markerPatternDescriptor);
            //            var patternSize = binaryMarker.getPattern().getSize();
            //            var binaryMarkerSize = binaryMarker.getSize();
            //            overlay3D.BindTo<IConfigurable>().getProperty("size").setFloatingValue(binaryMarkerSize.width, 0);
            //            overlay3D.BindTo<IConfigurable>().getProperty("size").setFloatingValue(binaryMarkerSize.height, 1);
            //            overlay3D.BindTo<IConfigurable>().getProperty("size").setFloatingValue(binaryMarkerSize.height / 2.0f, 2);

            //            patternDescriptorExtractor.BindTo<IConfigurable>().getProperty("patternSize").setIntegerValue(patternSize);
            //            patternReIndexer.BindTo<IConfigurable>().getProperty("sbPatternSize").setIntegerValue(patternSize);
            //            img2worldMapper.BindTo<IConfigurable>().getProperty("digitalWidth").setIntegerValue(patternSize);
            //            img2worldMapper.BindTo<IConfigurable>().getProperty("digitalHeight").setIntegerValue(patternSize);
            //            img2worldMapper.BindTo<IConfigurable>().getProperty("worldWidth").setFloatingValue(binaryMarkerSize.width);
            //            img2worldMapper.BindTo<IConfigurable>().getProperty("worldHeight").setFloatingValue(binaryMarkerSize.height);

            //            Func<Image, Transform3Df, bool> detectFiducialMarker = (Image image, Transform3Df localpose) =>
            //            {
            //                Image greyImage = SharedPtr.Alloc<Image>().AddTo(subscriptions);
            //                Image binaryImage = SharedPtr.Alloc<Image>().AddTo(subscriptions);

            //                Contour2DfArray contours = new Contour2DfArray().AddTo(subscriptions);
            //                Contour2DfArray filtered_contours = new Contour2DfArray().AddTo(subscriptions);
            //                ImageList patches = new ImageList().AddTo(subscriptions);
            //                Contour2DfArray recognizedContours = new Contour2DfArray().AddTo(subscriptions);
            //                DescriptorBuffer recognizedPatternsDescriptors = new DescriptorBuffer().AddTo(subscriptions);
            //                DescriptorBuffer markerPatternDescriptor = new DescriptorBuffer().AddTo(subscriptions);
            //                DescriptorMatchVector patternMatches = new DescriptorMatchVector().AddTo(subscriptions);
            //                Point2DfArray pattern2DPoints = new Point2DfArray().AddTo(subscriptions);
            //                Point2DfArray img2DPoints = new Point2DfArray().AddTo(subscriptions);
            //                Point3DfArray pattern3DPoints = new Point3DfArray().AddTo(subscriptions);

            //                bool marker_found = false;
            //                // Convert Image from RGB to grey
            //                imageConvertor.convert(image, greyImage, Image.ImageLayout.LAYOUT_GREY).Check();

            //                // Convert Image from grey to black and white
            //                imageFilterBinary.filter(greyImage, binaryImage);
            //                // Extract contours from binary image
            //                contoursExtractor.extract(binaryImage, contours);
            //                // Filter four-edge contours to find marker contour candidates
            //                contoursFilter.filter(contours, filtered_contours);
            //                // Create one warped and cropped image per contour
            //                perspectiveController.correct(binaryImage, filtered_contours, patches);
            //                // test whether this last image is really a squared binary marker and, if so, extract its descriptor
            //                if (patternDescriptorExtractor.extract(patches, filtered_contours, recognizedPatternsDescriptors, recognizedContours) != FrameworkReturnCode._ERROR_)
            //                {
            //                    // From extracted squared binary pattern, match the one corresponding to the squared binary marker
            //                    if (patternMatcher.match(markerPatternDescriptor, recognizedPatternsDescriptors, patternMatches) == IDescriptorMatcher.RetCode.DESCRIPTORS_MATCHER_OK)
            //                    {
            //                        // Reindex the pattern to create two vectors of points: the first corresponding to the marker corners, the second to the points of the contour
            //                        patternReIndexer.reindex(recognizedContours, patternMatches, pattern2DPoints, img2DPoints);
            //                        // Compute the 3D position of each corner of the marker
            //                        img2worldMapper.map(pattern2DPoints, pattern3DPoints);
            //                        // Compute the camera pose with a Perspective-n-Point algorithm using only the 4 corners of the marker
            //                        if (pnp.estimate(img2DPoints, pattern3DPoints, pose) == FrameworkReturnCode._SUCCESS)
            //                        {
            //                            marker_found = true;
            //                        }
            //                    }
            //                }
            //                return marker_found;
            //            };

            //            if (detectFiducialMarker(camImage, poseFrame1))
            //            {
            //                keypointsDetector.detect(camImage, keypointsView1);
            //                descriptorExtractor.extract(camImage, keypointsView1, descriptorsView1);
            //                keyframe1 = new Datastructure.Keyframe(keypointsView1, descriptorsView1, camImage, poseFrame1);
            //                CloudPointVector tmpCP = new CloudPointVector();
            //                DescriptorMatchVector tmpDMV1 = new DescriptorMatchVector();
            //                DescriptorMatchVector tmpDMV2 = new DescriptorMatchVector();
            //                mapper.update(map, keyframe1, tmpCP, tmpDMV1, tmpDMV2);
            //                keyframePoses.Add(poseFrame1); // used for display
            //                kfRetriever.addKeyframe(keyframe1); // add keyframe for reloc
            //            }

            //            bool bootstrapOk = false;
            //            while (!bootstrapOk)
            //            {
            //                if (camera.getNextImage(view2) == FrameworkReturnCode._ERROR_)
            //                    break;

            //                if (!detectFiducialMarker(view2, poseFrame2))
            //                {
            //                    if (imageViewer.display(view2) == FrameworkReturnCode._STOP)
            //                        return FrameworkReturnCode._ERROR_;
            //                    continue;
            //                }
            //                float disTwoKeyframes = (float)Math.Sqrt(Math.Pow(poseFrame1.translation().coeff(0, 0) - poseFrame2.translation().coeff(0, 0), 2.0f) + Math.Pow(poseFrame1.translation().coeff(1, 0) - poseFrame2.translation().coeff(1, 0), 2.0f) +
            //                    Math.Pow(poseFrame1.translation().coeff(2, 0) - poseFrame2.translation().coeff(2, 0), 2.0f));

            //                if (disTwoKeyframes < 0.1)
            //                {
            //                    if (imageViewer.display(view2) == FrameworkReturnCode._STOP)
            //                        return FrameworkReturnCode._STOP;
            //                    continue;
            //                }

            //                keypointsDetector.detect(view2, keypointsView2);
            //                descriptorExtractor.extract(view2, keypointsView2, descriptorsView2);
            //                Frame frame2 = new Frame(keypointsView2, descriptorsView2, view2, keyframe1);
            //                matcher.match(descriptorsView1, descriptorsView2, matches);
            //                int nbOriginalMatches = matches.Count;
            //                matchesFilter.filter(matches, matches, keypointsView1, keypointsView2);

            //                matchesOverlay.draw(view2, imageMatches, keypointsView1, keypointsView2, matches);
            //                if (imageViewer.display(imageMatches) == FrameworkReturnCode._STOP)
            //                    return FrameworkReturnCode._STOP;

            //                if (keyframeSelector.select(frame2, matches))
            //                {
            //                    frame2.setPose(poseFrame2);
            //                    // Triangulate
            //                    keyframe2 = new Datastructure.Keyframe(frame2);
            //                    triangulator.triangulate(keyframe2, matches, cloud);
            //                    //double reproj_error = triangulator->triangulate(keypointsView1, keypointsView2, matches, std::make_pair(0, 1), poseFrame1, poseFrame2, cloud);
            //                    mapFilter.filter(poseFrame1, poseFrame2, cloud, filteredCloud);
            //                    keyframePoses.Add(poseFrame2); // used for display
            //                    DescriptorMatchVector tmpDMV = new DescriptorMatchVector();
            //                    mapper.update(map, keyframe2, filteredCloud, matches, tmpDMV);
            //                    kfRetriever.addKeyframe(keyframe2); // add keyframe for reloc
            //                    if (bundling)
            //                    {
            //                        IntVector firstIdxKFs = new IntVector() { 0, 1 };
            //                        localBundleAdjuster(firstIdxKFs, bundleReprojError);
            //                    }
            //                    bootstrapOk = true;
            //                }
            //            }
            //            Action<Datastructure.Keyframe, CloudPointVector, UIntVector, Datastructure.Keyframe, Frame> updateData =
            //            (Datastructure.Keyframe refKf, CloudPointVector localMap, UIntVector idxLocalMap, Datastructure.Keyframe referenceKeyframe, Frame frameToTrack) =>
            //            {
            //                referenceKeyframe = refKf;
            //                frameToTrack = new Frame(referenceKeyframe);
            //                frameToTrack.setReferenceKeyframe(referenceKeyframe);
            //                idxLocalMap.Clear();
            //                localMap.Clear();
            //                mapper.getLocalMapIndex(referenceKeyframe, idxLocalMap);
            //                foreach (var it in idxLocalMap)
            //                    localMap.Add(mapper.getGlobalMap().getAPoint((int)it));
            //            };

            //            // check need to make a new keyframe based on all existed keyframes
            //            Func<Frame, bool> checkNeedNewKfWithAllKfs = (Frame newFrame)
            //             =>
            //            {
            //                KeyframeList ret_keyframes = new KeyframeList();
            //                if (kfRetriever.retrieve(newFrame, ret_keyframes) == FrameworkReturnCode._SUCCESS)
            //                {
            //                    if (ret_keyframes[0].m_idx != referenceKeyframe.m_idx)
            //                    {
            //                        updatedRefKf = ret_keyframes[0];
            //                        return true;
            //                    }
            //                    return false;
            //                }
            //                else
            //                    return false;
            //            };

            //            // checkDisparityDistance
            //            Func<Frame, bool> checkDisparityDistance = (Frame newFrame)
            //             =>
            //            {
            //                CloudPointVector cloudPoint = map.getPointCloud();
            //                KeypointArray refKeypoints = referenceKeyframe.getKeypoints();
            //                MapIntInt refMapVisibility = referenceKeyframe.getVisibleMapPoints();
            //                CloudPointVector cpRef = new CloudPointVector();
            //                Point2DfArray projected2DPts = new Point2DfArray();
            //                Point2DfArray ref2DPts = new Point2DfArray();

            //                foreach (var it in refMapVisibility)
            //                {
            //                    cpRef.Add(cloudPoint[(int)it.Value]);
            //                    ref2DPts.Add(new Point2Df(refKeypoints[(int)it.Key].getX(), refKeypoints[(int)it.Key].getY()));
            //                }
            //                projector.project(cpRef, projected2DPts, newFrame.getPose());

            //                uint imageWidth = newFrame.getView().getWidth();
            //                double totalMatchesDist = 0.0;
            //                for (int i = 0; i < projected2DPts.Count; i++)
            //                {
            //                    Point2Df pt1 = ref2DPts[i];
            //                    Point2Df pt2 = projected2DPts[i];

            //                    totalMatchesDist += Math.Sqrt(((pt1.getX() - pt2.getX()) * (pt1.getX() - pt2.getX())) + ((pt1.getY() - pt2.getY()) * (pt1.getY() - pt2.getY()))) / imageWidth;
            //                }
            //                double meanMatchesDist = totalMatchesDist / projected2DPts.Count;
            //                return (meanMatchesDist > 0.07);
            //            };

            //            Action<Datastructure.Keyframe> updateAssociateCloudPoint = (Datastructure.Keyframe newKf) =>
            //            {
            //                MapIntInt newkf_mapVisibility = newKf.getVisibleMapPoints();
            //                MapIntInt kfCounter = new MapIntInt();
            //                foreach (var it in newkf_mapVisibility)
            //                {
            //                    CloudPoint cp = map.getAPoint((int)it.Value);
            //                    // calculate the number of connections to other keyframes
            //                    MapIntInt cpKfVisibility = cp.getVisibility();
            //                    foreach (var it_kf in cpKfVisibility)
            //                        kfCounter[it_kf.Value]++;
            //                    ///// update descriptor of cp: des_cp = ((des_cp * cp.getVisibility().size()) + des_buf) / (cp.getVisibility().size() + 1)
            //                    //// TO DO
            //                    cp.visibilityAddKeypoint((uint)newKf.m_idx, it.Key);
            //                }

            //                foreach (var it in kfCounter)
            //                    if ((it.Key != newKf.m_idx) && (it.Value > 20))
            //                    {
            //                        newKf.addNeighborKeyframe(it.Key, it.Value);
            //                    }
            //            };

            //            Action<Datastructure.Keyframe, IntVector, List<Tuple<uint, int, uint>>, CloudPointVector> findMatchesAndTriangulation =
            //                                           (Datastructure.Keyframe newKf, IntVector idxBestNeighborKfs,
            //                                           List<Tuple<uint, int, uint>> infoMatches, CloudPointVector cloudPoint) =>
            //            {
            //                MapIntInt newKf_mapVisibility = newKf.getVisibleMapPoints();
            //                DescriptorBuffer newKf_des = newKf.getDescriptors();
            //                KeypointArray newKf_kp = newKf.getKeypoints();
            //                Transform3Df newKf_pose = newKf.getPose();

            //                bool[] checkMatches = new bool[newKf_kp.Capacity];

            //                for (uint i = 0; i < newKf_kp.Count; ++i)
            //                    if (newKf_mapVisibility[i] != newKf_mapVisibility.Count)
            //                    {
            //                        checkMatches[i] = true;
            //                    }

            //                for (int i = 0; i < idxBestNeighborKfs.Count; ++i)
            //                {
            //                    IntVector newKf_indexKeypoints = new IntVector();
            //                    for (int j = 0; j < checkMatches.Length; ++j)
            //                        if (!checkMatches[j])
            //                            newKf_indexKeypoints.Add(j);

            //                    // get neighbor keyframe i
            //                    Datastructure.Keyframe tmpKf = mapper.getKeyframe(idxBestNeighborKfs[i]);
            //                    Transform3Df tmpPose = tmpKf.getPose();

            //                    // check distance between two keyframes
            //                    double distPose = Math.Sqrt(Math.Pow(newKf_pose.translation().coeff(0, 0) - tmpPose.translation().coeff(0, 0), 2.0f) + Math.Pow(newKf_pose.translation().coeff(0, 1)
            //                        - tmpPose.translation().coeff(0, 1), 2.0f) + Math.Pow(newKf_pose.translation().coeff(0, 2) - tmpPose.translation().coeff(0, 2), 2.0f));
            //                    if (distPose < 0.05)
            //                        continue;

            //                    // Matching based on BoW
            //                    DescriptorMatchVector tmpMatches = new DescriptorMatchVector();
            //                    DescriptorMatchVector goodMatches = new DescriptorMatchVector();

            //                    kfRetriever.match(newKf_indexKeypoints, newKf_des, idxBestNeighborKfs[i], tmpMatches);

            //                    // filter matches based on epipolar lines
            //                    matchesFilter.filter(tmpMatches, tmpMatches, newKf_kp, tmpKf.getKeypoints(), newKf.getPose(), tmpKf.getPose(), camera.getIntrinsicsParameters());

            //                    // find info to triangulate
            //                    List<Tuple<uint, int, uint>> tmpInfoMatches = new List<Tuple<uint, int, uint>>();
            //                    MapIntInt tmpMapVisibility = tmpKf.getVisibleMapPoints();
            //                    for (int j = 0; j < tmpMatches.Capacity; ++j)
            //                    {
            //                        uint idx_newKf = tmpMatches[j].getIndexInDescriptorA();
            //                        uint idx_tmpKf = tmpMatches[j].getIndexInDescriptorB();
            //                        if ((!checkMatches[idx_newKf]) && !(tmpMapVisibility.Keys.Contains(idx_tmpKf)))
            //                        {
            //                            tmpInfoMatches.Add(new Tuple<uint, int, uint>(idx_newKf, idxBestNeighborKfs[i], idx_tmpKf));
            //                            goodMatches.Add(tmpMatches[j]);
            //                        }
            //                    }

            //                    // triangulation
            //                    CloudPointVector tmpCloudPoint = new CloudPointVector();
            //                    CloudPointVector tmpFilteredCloudPoint = new CloudPointVector();

            //                    IntVector indexFiltered = new IntVector();
            //                    if (goodMatches.Count > 0)
            //                        triangulator.triangulate(newKf_kp, tmpKf.getKeypoints(), newKf_des, tmpKf.getDescriptors(), goodMatches,
            //                            new PairUIntUInt((uint)newKf.m_idx, (uint)idxBestNeighborKfs[i]), newKf.getPose(), tmpKf.getPose(), tmpCloudPoint);

            //                    // filter cloud points
            //                    if (tmpCloudPoint.Count > 0)
            //                        mapFilter.filter(newKf.getPose(), tmpKf.getPose(), tmpCloudPoint, tmpFilteredCloudPoint, indexFiltered);
            //                    for (int o = 0; o < indexFiltered.Count; ++o)
            //                    {
            //                        checkMatches[(tmpInfoMatches[indexFiltered[o]]).Item1] = true;
            //                        infoMatches.Add(tmpInfoMatches[indexFiltered[o]]);
            //                        cloudPoint.Add(tmpFilteredCloudPoint[o]);
            //                    }
            //                }
            //            };

            //            Action<Datastructure.Keyframe, UIntVector, List<Tuple<uint, int, uint>>, CloudPointVector> fuseCloudPoint =
            //                (Datastructure.Keyframe newKeyframe, UIntVector idxNeigborKfs, List<Tuple<uint, int, uint>> infoMatches, CloudPointVector newCloudPoint)
            //                =>
            //                {
            //                    bool[] checkMatches = new bool[newCloudPoint.Capacity];
            //                    for (int i = 0; i < checkMatches.Length; i++)
            //                        checkMatches[i] = true;

            //                    DescriptorBufferList desNewCloudPoint = new DescriptorBufferList();

            //                    foreach (var it_cp in newCloudPoint)
            //                    {
            //                        desNewCloudPoint.Add(it_cp.getDescriptor());
            //                    }

            //                    for (int i = 0; i < idxNeigborKfs.Count; ++i)
            //                    {
            //                        // get a neighbor
            //                        Datastructure.Keyframe neighborKf = mapper.getKeyframe((int)idxNeigborKfs[i]);
            //                        MapIntInt mapVisibilitiesNeighbor = neighborKf.getVisibleMapPoints();

            //                        //  projection points
            //                        Point2DfArray projected2DPts = new Point2DfArray();
            //                        projector.project(newCloudPoint, projected2DPts, neighborKf.getPose());

            //                        DescriptorMatchVector allMatches = new DescriptorMatchVector();
            //                        matcher.matchInRegion(projected2DPts, desNewCloudPoint, neighborKf, allMatches, 5.0f);

            //                        for (int j = 0; j < allMatches.Count; ++j)
            //                        {
            //                            int idxNewCloudPoint = (int)allMatches[j].getIndexInDescriptorA();
            //                            int idxKpNeighbor = (int)allMatches[j].getIndexInDescriptorB();
            //                            if (!checkMatches[idxNewCloudPoint])
            //                                continue;
            //                            Tuple<uint, int, uint> infoMatch = infoMatches[idxNewCloudPoint];

            //                            // check whether this cloud point was created from the same neighbor keyframe
            //                            if (infoMatch.Item2 == idxNeigborKfs[i])
            //                                continue;

            //                            // check whether a cloud point in the neighbor keyframe coincides with this cloud point.
            //                            bool it_cp = mapVisibilitiesNeighbor.ContainsKey((uint)idxKpNeighbor);
            //                            if (it_cp)
            //                            {
            //                                // fuse
            //                                CloudPoint old_cp = map.getAPoint((int)mapVisibilitiesNeighbor[(uint)idxKpNeighbor]);
            //                                old_cp.visibilityAddKeypoint((uint)newKeyframe.m_idx, infoMatch.Item1);
            //                                old_cp.visibilityAddKeypoint((uint)infoMatch.Item2, infoMatch.Item3);

            //                                newKeyframe.addVisibleMapPoint(infoMatch.Item1, mapVisibilitiesNeighbor[(uint)idxKpNeighbor]);
            //                                mapper.getKeyframe(infoMatch.Item2).addVisibleMapPoint(infoMatch.Item3, mapVisibilitiesNeighbor[(uint)idxKpNeighbor]);

            //                                checkMatches[idxNewCloudPoint] = false;
            //                            }
            //                        }
            //                    }
            //                    List<Tuple<uint, int, uint>> tmpInfoMatches = new List<Tuple<uint, int, uint>>();
            //                    CloudPointVector tmpNewCloudPoint = new CloudPointVector();
            //                    for (int i = 0; i < checkMatches.Length; ++i)
            //                        if (checkMatches[i])
            //                        {
            //                            tmpInfoMatches.Add(infoMatches[i]);
            //                            tmpNewCloudPoint.Add(newCloudPoint[i]);
            //                        }
            //                    infoMatches = tmpInfoMatches;
            //                    newCloudPoint = tmpNewCloudPoint;
            //                };

            //            Func<Frame, Frame> processNewKeyframe = (Frame newFrame) =>
            //            {
            //                // create a new keyframe from the current frame
            //                Datastructure.Keyframe newKeyframe = new Datastructure.Keyframe(newFrame);
            //                // Add to BOW retrieval
            //                kfRetriever.addKeyframe(newKeyframe);
            //                // Update keypoint visibility, cloud point descriptors, and connections between the new keyframe and other keyframes
            //                updateAssociateCloudPoint(newKeyframe);
            //                // get best neighbor keyframes
            //                UIntVector idxBestNeighborKfs = newKeyframe.getBestNeighborKeyframes(4);
            //                IntVector tmp_idxBestNeighborKfs = new IntVector();
            //                for (int i = 0; i < idxBestNeighborKfs.Count; ++i)
            //                    tmp_idxBestNeighborKfs[i] = (int)idxBestNeighborKfs[i];
            //                // find matches between unmatched keypoints in the new keyframe and the best neighboring keyframes
            //                List<Tuple<uint, int, uint>> infoMatches = new List<Tuple<uint, int, uint>>(); // first: index of kp in newKf, second: index of Kf, third: index of kp in Kf.
            //                CloudPointVector newCloudPoint = new CloudPointVector();
            //                findMatchesAndTriangulation(newKeyframe, tmp_idxBestNeighborKfs, infoMatches, newCloudPoint);
            //                if (newCloudPoint.Count > 0)
            //                {
            //                    // fuse duplicate points
            //                    UIntVector idxNeigborKfs = newKeyframe.getBestNeighborKeyframes(10);
            //                    fuseCloudPoint(newKeyframe, idxNeigborKfs, infoMatches, newCloudPoint);
            //                }
            //                // mapper update
            //                mapper.update(map, newKeyframe, newCloudPoint, infoMatches);
            //                return newKeyframe;
            //            };

            //            // Prepare for tracking
            //            lastPose = poseFrame2;
            //            updateData(keyframe2, localMap, idxLocalMap, referenceKeyframe, frameToTrack);

            //            // Get current image
            //            camera.getNextImage(view);
            //            keypointsDetector.detect(view, keypointsView);
            //            descriptorExtractor.extract(view, keypointsView, descriptorsView);
            //            newFrame = new Frame(keypointsView, descriptorsView, view, referenceKeyframe);
            //            // match current keypoints with the keypoints of the Keyframe
            //            matcher.match(frameToTrack.getDescriptors(), descriptorsView, matches);
            //            matchesFilter.filter(matches, matches, frameToTrack.getKeypoints(), keypointsView);

            //            Point2DfArray pt2d = new Point2DfArray();
            //            Point3DfArray pt3d = new Point3DfArray();
            //            CloudPointVector foundPoints = new CloudPointVector();
            //            DescriptorMatchVector foundMatches = new DescriptorMatchVector();
            //            DescriptorMatchVector remainingMatches = new DescriptorMatchVector();
            //            corr2D3DFinder.find(frameToTrack, newFrame, matches, map, pt3d, pt2d, foundMatches, remainingMatches);
            //            // display matches
            //            imageMatches = view.copy();

            //            Point2DfArray imagePoints_inliers = new Point2DfArray();
            //            Point3DfArray worldPoints_inliers = new Point3DfArray();
            //            if (pnpRansac.estimate(pt2d, pt3d, imagePoints_inliers, worldPoints_inliers, newFramePose, lastPose) == FrameworkReturnCode._SUCCESS)
            //            {
            //                // Set the pose of the new frame
            //                newFrame.setPose(newFramePose);

            //                // refine pose and update map visibility of frame
            //                {
            //                    // get all keypoints of the new frame
            //                    KeypointArray keypoints = newFrame.getKeypoints();

            //                    //  projection points
            //                    Point2DfArray projected2DPts = new Point2DfArray();
            //                    projector.project(localMap, projected2DPts, newFrame.getPose());

            //                    DescriptorBufferList desAllLocalMap = new DescriptorBufferList();
            //                    foreach (var it_cp in localMap)
            //                    {
            //                        desAllLocalMap.Add(it_cp.getDescriptor());
            //                    }

            //                    // match features in region
            //                    DescriptorMatchVector allMatches = new DescriptorMatchVector();
            //                    matcher.matchInRegion(projected2DPts, desAllLocalMap, newFrame, allMatches, 5.0f);

            //                    Point2DfArray _pt2d = new Point2DfArray();
            //                    Point3DfArray _pt3d = new Point3DfArray();
            //                    MapIntInt newMapVisibility = new MapIntInt();


            //                    foreach (var it_match in allMatches)
            //                    {
            //                        int idx_2d = (int) it_match.getIndexInDescriptorB();
            //                        int idx_3d = (int) it_match.getIndexInDescriptorA();
            //                        _pt2d.Add(new Point2Df(keypoints[idx_2d].getX(), keypoints[idx_2d].getY()));
            //                        _pt3d.Add(new Point3Df(localMap[idx_3d].getX(), localMap[idx_3d].getY(), localMap[idx_3d].getZ()));
            //                        newMapVisibility[(uint)idx_2d] = idxLocalMap[idx_3d];
            //                    }

            //                    // pnp optimization
            //                    Transform3Df refinedPose = new Transform3Df();
            //                    pnp.estimate(pt2d, pt3d, refinedPose, newFrame.getPose());
            //                    newFrame.setPose(refinedPose);

            //                    // update map visibility of current frame
            //                    newFrame.addVisibleMapPoints(newMapVisibility);
            //                    overlay2D.drawCircles(pt2d, imageMatches);
            //                    overlay3D.draw(refinedPose, imageMatches);
            //                }
            //                lastPose = newFrame.getPose();

            //                // check need new keyframe
            //                if (checkDisparityDistance(newFrame))
            //                {
            //                    if (checkNeedNewKfWithAllKfs(newFrame))
            //                    {
            //                        updateData(updatedRefKf, localMap, idxLocalMap, referenceKeyframe, frameToTrack);
            //                    }
            //                    else
            //                    {
            //                        Datastructure.Keyframe newKeyframe = new Datastructure.Keyframe(processNewKeyframe(newFrame));
            //                        if (bundling)
            //                        {
            //                            // get current keyframe idx
            //                            int currentIdxKF = mapper.getKeyframes()[mapper.getKeyframes().Capacity - 1].m_idx;
            //                            // get keyframe connectivity graph
            //                            UIntVector bestIdx = mapper.getKeyframes()[currentIdxKF].getBestNeighborKeyframes(2);
            //                            // define 2 best keyframes + current keyframe
            //                            windowIdxBundling = new IntVector() {(int)(bestIdx[0]),(int)(bestIdx[1]),currentIdxKF };
            //                            //	windowIdxBundling = { currentIdxKF - 1, currentIdxKF }; // temporal sliding window
            //                            localBundleAdjuster(windowIdxBundling, bundleReprojError);
            //                        }
            //                        // update data
            //                        updateData(newKeyframe, localMap, idxLocalMap, referenceKeyframe, frameToTrack);
            //                        // add keyframe pose to display
            //                        keyframePoses.Add(newKeyframe.getPose());
            //                    }
            //                }
            //                else
            //                {
            //                    // update frame to track
            //                    frameToTrack = newFrame;
            //                }

            //                framePoses.Add(newFrame.getPose()); // used for display

            //                isLostTrack = false;    // tracking is good

            //            }
            //            else
            //            {
            //                isLostTrack = true;     // lost tracking
            //                                        // reloc
            //                KeyframeList ret_keyframes = new KeyframeList();
            //                if (kfRetriever.retrieve(newFrame, ret_keyframes) == FrameworkReturnCode._SUCCESS)
            //                {
            //                    // update data
            //                    updateData(ret_keyframes[0], localMap, idxLocalMap, referenceKeyframe, frameToTrack);
            //                    lastPose = referenceKeyframe.getPose();
            //                }
            //            }

            //            // display matches and a cube on the fiducial marker
            //            if (imageViewer.display(imageMatches) == FrameworkReturnCode._STOP)
            //                return FrameworkReturnCode._STOP;

            //            // display point cloud
            //            if (viewer3DPoints.display(map.getPointCloud(), lastPose, keyframePoses, framePoses, localMap) == FrameworkReturnCode._STOP)
            //                return FrameworkReturnCode._STOP;

            return(FrameworkReturnCode._SUCCESS);
        }
Example #18
 public abstract FrameworkReturnCode Proceed(Image inputImage, Transform3Df pose, ICamera camera);
Example #19
        public virtual FrameworkReturnCode project(CloudPointVector inputPoints, Point2DfArray imagePoints, Transform3Df pose)
        {
            FrameworkReturnCode ret = (FrameworkReturnCode)solar_api_geomPINVOKE.IProject_project__SWIG_2(swigCPtr, CloudPointVector.getCPtr(inputPoints), Point2DfArray.getCPtr(imagePoints), Transform3Df.getCPtr(pose));

            if (solar_api_geomPINVOKE.SWIGPendingException.Pending)
            {
                throw solar_api_geomPINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
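A sketch of a typical call (assumptions: 'projector' is an IProject component obtained from the component manager, 'pointCloud' is a CloudPointVector, and 'currentPose' is the current camera pose):

            Point2DfArray imagePoints = new Point2DfArray();
            if (projector.project(pointCloud, imagePoints, currentPose) == FrameworkReturnCode._SUCCESS)
            {
                // imagePoints[i] is the 2D projection of pointCloud[i] for the given pose
            }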
Example #20
        public virtual FrameworkReturnCode estimate(Point2DfArray imagePoints, Point3DfArray worldPoints, Point2DfArray imagePoints_inlier, Point3DfArray worldPoints_inlier, Transform3Df pose)
        {
            FrameworkReturnCode ret = (FrameworkReturnCode)solar_api_solver_posePINVOKE.I3DTransformSACFinderFrom2D3D_estimate__SWIG_1(swigCPtr, Point2DfArray.getCPtr(imagePoints), Point3DfArray.getCPtr(worldPoints), Point2DfArray.getCPtr(imagePoints_inlier), Point3DfArray.getCPtr(worldPoints_inlier), Transform3Df.getCPtr(pose));

            if (solar_api_solver_posePINVOKE.SWIGPendingException.Pending)
            {
                throw solar_api_solver_posePINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
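A sketch of how this overload is typically used (assumptions: 'pnpRansac' is an I3DTransformSACFinderFrom2D3D component and 'imagePoints'/'worldPoints' are matched 2D-3D correspondences):

            Point2DfArray imageInliers = new Point2DfArray();
            Point3DfArray worldInliers = new Point3DfArray();
            Transform3Df estimatedPose = new Transform3Df();
            if (pnpRansac.estimate(imagePoints, worldPoints, imageInliers, worldInliers, estimatedPose) == FrameworkReturnCode._SUCCESS)
            {
                // estimatedPose holds the camera pose; imageInliers/worldInliers hold the RANSAC consensus set
            }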
Example #21
        protected override void OnEnable()
        {
            base.OnEnable();

            xpcfComponentManager = xpcf_api.getComponentManagerInstance();
            Disposable.Create(xpcfComponentManager.clear).AddTo(subscriptions);
            xpcfComponentManager.AddTo(subscriptions);

#if !UN
            conf.path = File.ReadAllText("confPath.txt");
#endif
            if (xpcfComponentManager.load(conf.path) != XPCFErrorCode._SUCCESS)
            {
                Debug.LogErrorFormat("Failed to load the configuration file {0}", conf.path);
                enabled = false;
                return;
            }

            switch (mode)
            {
            case PIPELINE.Fiducial:
                pipeline = new FiducialPipeline(xpcfComponentManager).AddTo(subscriptions);
                break;

            case PIPELINE.Natural:
                pipeline = new NaturalPipeline(xpcfComponentManager).AddTo(subscriptions);
                break;

            case PIPELINE.SLAM:
                pipeline = new SlamPipeline(xpcfComponentManager).AddTo(subscriptions);
                break;
            }

            overlay3D = xpcfComponentManager.Create("SolAR3DOverlayOpencv").BindTo <I3DOverlay>().AddTo(subscriptions);

            switch (source)
            {
            case SOURCE.SolAR:
                camera = xpcfComponentManager.Create("SolARCameraOpencv").BindTo <ICamera>().AddTo(subscriptions);

                var intrinsic  = camera.getIntrinsicsParameters();
                var distorsion = camera.getDistorsionParameters();
                var resolution = camera.getResolution();
                pipeline.SetCameraParameters(intrinsic, distorsion);
                overlay3D.setCameraParameters(intrinsic, distorsion);
                OnCalibrate?.Invoke(resolution, intrinsic, distorsion);

                if (camera.start() != FrameworkReturnCode._SUCCESS)
                {
                    LOG_ERROR("Camera cannot start");
                    enabled = false;
                }
                break;

            case SOURCE.Unity:
                webcam = new WebCamTexture();
                webcam.Play();
                if (!webcam.isPlaying)
                {
                    LOG_ERROR("Camera cannot start");
                    enabled = false;
                }
                break;
            }

            switch (display)
            {
            case DISPLAY.SolAR:
                // Set the size of the box to display according to the marker size in world units
                var overlay3D_sizeProp = overlay3D.BindTo <IConfigurable>().getProperty("size");
                var size = pipeline.GetMarkerSize();
                overlay3D_sizeProp.setFloatingValue(size.width, 0);
                overlay3D_sizeProp.setFloatingValue(size.height, 1);
                overlay3D_sizeProp.setFloatingValue(size.height / 2.0f, 2);

                imageViewer = xpcfComponentManager.Create("SolARImageViewerOpencv").AddTo(subscriptions).BindTo <IImageViewer>().AddTo(subscriptions);
                break;

            case DISPLAY.Unity:
                break;
            }

            start = clock();

            inputImage = SharedPtr.Alloc <Image>().AddTo(subscriptions);
            pose       = new Transform3Df().AddTo(subscriptions);
        }
Example #22
        public virtual FrameworkReturnCode unproject(KeypointArray imageKeypoints, Point3DfArray worldPoints, Transform3Df pose)
        {
            FrameworkReturnCode ret = (FrameworkReturnCode)solar_api_geomPINVOKE.IUnproject_unproject__SWIG_2(swigCPtr, KeypointArray.getCPtr(imageKeypoints), Point3DfArray.getCPtr(worldPoints), Transform3Df.getCPtr(pose));

            if (solar_api_geomPINVOKE.SWIGPendingException.Pending)
            {
                throw solar_api_geomPINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
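A minimal usage sketch for this unproject overload, assuming an IUnproject component named unprojection and a keypoint detector named kpDetector have already been created and configured (both names also appear in the surrounding examples), and that pose holds a previously estimated Transform3Df for the current frame:

        // Sketch only: detect 2D keypoints, then lift them to 3D world points using a known pose
        KeypointArray keypoints = new KeypointArray();
        kpDetector.detect(camImage, keypoints);

        Point3DfArray worldPoints = new Point3DfArray();   // filled by unproject
        if (unprojection.unproject(keypoints, worldPoints, pose) != FrameworkReturnCode._SUCCESS)
        {
            LOG_ERROR("Unprojection failed");
        }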
Exemplo n.º 23
0
        public override FrameworkReturnCode Proceed(Image camImage, Transform3Df pose, ICamera camera)
        {
            // initialize the overlay 3D component with the camera intrinsic parameters (please refer to the intrinsic parameters file)
            overlay3DComponent.setCameraParameters(camera.getIntrinsicsParameters(), camera.getDistorsionParameters());

            // initialize the pose estimation based on planar points with the camera intrinsic parameters (please refer to the intrinsic parameters file)
            poseEstimationPlanar.setCameraParameters(camera.getIntrinsicsParameters(), camera.getDistorsionParameters());

            // initialize the projection component with the camera intrinsic parameters (please refer to the intrinsic parameters file)
            projection.setCameraParameters(camera.getIntrinsicsParameters(), camera.getDistorsionParameters());

            // initialize the unprojection component with the camera intrinsic parameters (please refer to the intrinsic parameters file)
            unprojection.setCameraParameters(camera.getIntrinsicsParameters(), camera.getDistorsionParameters());
            if (!isTrack)
            {
                kpDetector.detect(camImage, camKeypoints);
                descriptorExtractor.extract(camImage, camKeypoints, camDescriptors);
                matcher.match(refDescriptors, camDescriptors, matches);
                basicMatchesFilter.filter(matches, matches, refKeypoints, camKeypoints);
                geomMatchesFilter.filter(matches, matches, refKeypoints, camKeypoints);

                var ref2Dpoints = new Point2DfArray();
                var cam2Dpoints = new Point2DfArray();
                var ref3Dpoints = new Point3DfArray();

                if (matches.Count > 10)
                {
                    keypointsReindexer.reindex(refKeypoints, camKeypoints, matches, ref2Dpoints, cam2Dpoints).Check();
                    img_mapper.map(ref2Dpoints, ref3Dpoints).Check();
                    if (poseEstimationPlanar.estimate(cam2Dpoints, ref3Dpoints, imagePoints_inliers, worldPoints_inliers, pose) != FrameworkReturnCode._SUCCESS)
                    {
                        valid_pose = false;
                        //LOG_DEBUG("Wrong homography for this frame");
                    }
                    else
                    {
                        isTrack = true;
                        needNewTrackedPoints = true;
                        valid_pose           = true;
                        previousCamImage     = camImage.copy();
                        //LOG_INFO("Start tracking", pose.matrix());
                    }
                }
            }
            else
            {
                // initialize points to track
                if (needNewTrackedPoints)
                {
                    imagePoints_track.Clear();
                    worldPoints_track.Clear();
                    KeypointArray newKeypoints = new KeypointArray();
                    // Get the projection of the corners of the marker in the current image
                    projection.project(markerWorldCorners, projectedMarkerCorners, pose);

                    // Detect the keypoints within the contours of the marker defined by the projected corners
                    kpDetectorRegion.detect(previousCamImage, projectedMarkerCorners, newKeypoints);

                    if (newKeypoints.Count > updateTrackedPointThreshold)
                    {
                        foreach (var keypoint in newKeypoints)
                        {
                            //imagePoints_track.push_back(xpcf::utils::make_shared<Point2Df>(keypoint->getX(), keypoint->getY()));
                            imagePoints_track.Add(new Point2Df(keypoint.getX(), keypoint.getY()));
                        }
                        // get back the 3D positions of the detected keypoints in world space
                        unprojection.unproject(imagePoints_track, worldPoints_track, pose);
                        //LOG_DEBUG("Reinitialize points to track");
                    }
                    else
                    {
                        isTrack = false;
                        //LOG_DEBUG("Cannot reinitialize points to track");
                    }
                    needNewTrackedPoints = false;
                }

                // Tracking mode
                if (isTrack)
                {
                    Point2DfArray trackedPoints = new Point2DfArray();
                    Point2DfArray pts2D         = new Point2DfArray();
                    Point3DfArray pts3D         = new Point3DfArray();

                    UCharList status = new UCharList();
                    FloatList err    = new FloatList();

                    // tracking 2D-2D
                    opticalFlow.estimate(previousCamImage, camImage, imagePoints_track, trackedPoints, status, err);

                    for (int i = 0; i < status.Count; i++)
                    {
                        if (status[i] == 1)
                        {
                            pts2D.Add(trackedPoints[i]);
                            pts3D.Add(worldPoints_track[i]);
                        }
                    }
                    // Estimate the camera pose from the 2D-3D planar correspondences
                    if (poseEstimationPlanar.estimate(pts2D, pts3D, imagePoints_track, worldPoints_track, pose) != FrameworkReturnCode._SUCCESS)
                    {
                        isTrack              = false;
                        valid_pose           = false;
                        needNewTrackedPoints = false;
                        //LOG_INFO("Tracking lost");
                    }
                    else
                    {
                        valid_pose       = true;
                        previousCamImage = camImage.copy();
                        if (worldPoints_track.Count < updateTrackedPointThreshold)
                        {
                            needNewTrackedPoints = true;
                        }
                    }
                }
                //else
                //LOG_INFO("Tracking lost");
            }

            if (valid_pose)
            {
                // Draw a box at the location of the recognized natural marker
                overlay3DComponent.draw(pose, camImage);
            }
            //if (imageViewerResult.display(camImage) == FrameworkReturnCode._STOP) return FrameworkReturnCode._STOP;
            return(FrameworkReturnCode._SUCCESS);
        }
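From the caller's side, a Proceed implementation like the one above would typically be driven once per frame with a Transform3Df that it fills in. The sketch below uses placeholder names (naturalPipeline, camImage); note that this natural-marker version returns _SUCCESS even when no valid pose was found for the frame (validity is tracked internally via valid_pose), whereas the fiducial version in the next example returns _SUCCESS only when the PnP estimate succeeds.

        // Per-frame call sketch (placeholder names, not the plugin's actual loop)
        Transform3Df pose = new Transform3Df();
        if (naturalPipeline.Proceed(camImage, pose, camera) == FrameworkReturnCode._SUCCESS)
        {
            // pose may have been updated and the overlay drawn into camImage
        }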
Exemplo n.º 24
0
        public override FrameworkReturnCode Proceed(Image inputImage, Transform3Df pose, ICamera camera)
        {
            overlay3DComponent.setCameraParameters(camera.getIntrinsicsParameters(), camera.getDistorsionParameters());

            // Convert Image from RGB to grey
            imageConvertor.convert(inputImage, greyImage, Image.ImageLayout.LAYOUT_GREY).Check();

            // Convert Image from grey to black and white
            imageFilterBinary.filter(greyImage, binaryImage).Check();

            // Extract contours from binary image
            contoursExtractor.extract(binaryImage, contours).Check();
#if !NDEBUG
            var contoursImage = binaryImage.copy();
            overlay2DContours.drawContours(contours, contoursImage);
#endif
            // Filter four-edged contours to find candidates for the marker contour
            contoursFilter.filter(contours, filtered_contours).Check();

#if !NDEBUG
            var filteredContoursImage = binaryImage.copy();
            overlay2DContours.drawContours(filtered_contours, filteredContoursImage);
#endif
            // Create one warped and cropped image per contour
            perspectiveController.correct(binaryImage, filtered_contours, patches).Check();

            // Test whether each patch is really a squared binary marker and, if so, extract its descriptor
            if (patternDescriptorExtractor.extract(patches, filtered_contours, recognizedPatternsDescriptors, recognizedContours) == FrameworkReturnCode._SUCCESS)
            {
#if !NDEBUG
                var std__cout = new System.Text.StringBuilder();

                /*
                 * LOG_DEBUG("Looking for the following descriptor:");
                 * for (var i = 0; i < markerPatternDescriptor.getNbDescriptors() * markerPatternDescriptor.getDescriptorByteSize(); i++)
                 * {
                 *
                 *  if (i % patternSize == 0)
                 *      std__cout.Append("[");
                 *  if (i % patternSize != patternSize - 1)
                 *      std__cout.Append(markerPatternDescriptor.data()[i]).Append(", ");
                 *  else
                 *      std__cout.Append(markerPatternDescriptor.data()[i]).Append("]").AppendLine();
                 * }
                 * std__cout.AppendLine();
                 */

                /*
                 * std__cout.Append(recognizedPatternsDescriptors.getNbDescriptors()).Append(" recognized Pattern Descriptors ").AppendLine();
                 * uint desrciptorSize = recognizedPatternsDescriptors.getDescriptorByteSize();
                 * for (uint i = 0; i < recognizedPatternsDescriptors.getNbDescriptors() / 4; i++)
                 * {
                 *  for (int j = 0; j < patternSize; j++)
                 *  {
                 *      for (int k = 0; k < 4; k++)
                 *      {
                 *          std__cout.Append("[");
                 *          for (int l = 0; l < patternSize; l++)
                 *          {
                 *              std__cout.Append(recognizedPatternsDescriptors.data()[desrciptorSize*((i*4)+k) + j*patternSize + l]);
                 *              if (l != patternSize - 1)
                 *                  std__cout.Append(", ");
                 *          }
                 *          std__cout.Append("]");
                 *      }
                 *      std__cout.AppendLine();
                 *  }
                 *  std__cout.AppendLine().AppendLine();
                 * }
                 */

                /*
                 * std__cout.Append(recognizedContours.Count).Append(" Recognized Pattern contour ").AppendLine();
                 * for (int i = 0; i < recognizedContours.Count / 4; i++)
                 * {
                 *  for (int j = 0; j < recognizedContours[i].Count; j++)
                 *  {
                 *      for (int k = 0; k < 4; k++)
                 *      {
                 *          std__cout.Append("[").Append(recognizedContours[i * 4 + k][j].getX()).Append(", ").Append(recognizedContours[i * 4 + k][j].getY()).Append("] ");
                 *      }
                 *      std__cout.AppendLine();
                 *  }
                 *  std__cout.AppendLine().AppendLine();
                 * }
                 * std__cout.AppendLine();
                 */
#endif

                // From the extracted squared binary patterns, match the one corresponding to the squared binary marker
                if (patternMatcher.match(markerPatternDescriptor, recognizedPatternsDescriptors, patternMatches) == Api.Features.IDescriptorMatcher.RetCode.DESCRIPTORS_MATCHER_OK)
                {
#if !NDEBUG
                    std__cout.Append("Matches :").AppendLine();
                    for (int num_match = 0; num_match < patternMatches.Count; num_match++)
                    {
                        std__cout.Append("Match [").Append(patternMatches[num_match].getIndexInDescriptorA()).Append(",").Append(patternMatches[num_match].getIndexInDescriptorB()).Append("], dist = ").Append(patternMatches[num_match].getMatchingScore()).AppendLine();
                    }
                    std__cout.AppendLine().AppendLine();
#endif

                    // Reindex the patterns to create two vectors of points: the first corresponding to the marker corners, the second to the points of the contour
                    patternReIndexer.reindex(recognizedContours, patternMatches, pattern2DPoints, img2DPoints).Check();
#if !NDEBUG
                    LOG_DEBUG("2D Matched points :");
                    for (int i = 0; i < img2DPoints.Count; i++)
                    {
                        LOG_DEBUG("{0}", img2DPoints[i]);
                    }
                    for (int i = 0; i < pattern2DPoints.Count; i++)
                    {
                        LOG_DEBUG("{0}", pattern2DPoints[i]);
                    }
                    overlay2DCircles.drawCircles(img2DPoints, inputImage);
#endif
                    // Compute the 3D position of each corner of the marker
                    img2worldMapper.map(pattern2DPoints, pattern3DPoints).Check();
#if !NDEBUG
                    std__cout.Append("3D Points position:").AppendLine();
                    for (int i = 0; i < pattern3DPoints.Count; i++)
                    {
                        LOG_DEBUG("{0}", pattern3DPoints[i]);
                    }
#endif
                    // Compute the camera pose with a Perspective-n-Point (PnP) algorithm, using only the 4 corners of the marker
                    if (PnP.estimate(img2DPoints, pattern3DPoints, pose) == FrameworkReturnCode._SUCCESS)
                    {
                        overlay3DComponent.draw(pose, inputImage);
                        return(FrameworkReturnCode._SUCCESS);
                    }
                }
#if !NDEBUG
                //LOG_DEBUG(std__cout.ToString());
                std__cout.Clear();
#endif
            }
            return(FrameworkReturnCode._ERROR_);
        }
Exemplo n.º 25
0
        public virtual FrameworkReturnCode estimate(KeypointArray pointsView1, KeypointArray pointsView2, Transform3Df poseView1, Transform3Df poseView2, DescriptorMatchVector inlierMatches)
        {
            FrameworkReturnCode ret = (FrameworkReturnCode)solar_api_solver_posePINVOKE.I3DTransformFinderFrom2D2D_estimate__SWIG_1(swigCPtr, KeypointArray.getCPtr(pointsView1), KeypointArray.getCPtr(pointsView2), Transform3Df.getCPtr(poseView1), Transform3Df.getCPtr(poseView2), DescriptorMatchVector.getCPtr(inlierMatches));

            if (solar_api_solver_posePINVOKE.SWIGPendingException.Pending)
            {
                throw solar_api_solver_posePINVOKE.SWIGPendingException.Retrieve();
            }
            return(ret);
        }
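A hedged usage sketch for this 2D-2D transform finder. The wrapper alone does not say which pose is input and which is output; the sketch assumes, as is common for this kind of interface, that poseView1 is the known pose of the first view, that poseView2 receives the estimated pose of the second view, and that inlierMatches ends up holding the matches retained as inliers. Component and keypoint names are placeholders.

        // Sketch only: assumed in/out roles of the two poses
        Transform3Df poseView1 = new Transform3Df();           // known pose of the first view
        Transform3Df poseView2 = new Transform3Df();           // estimated pose of the second view (assumed output)
        DescriptorMatchVector inlierMatches = new DescriptorMatchVector();

        if (poseFinderFrom2D2D.estimate(keypointsView1, keypointsView2, poseView1, poseView2, inlierMatches) != FrameworkReturnCode._SUCCESS)
        {
            LOG_ERROR("2D-2D pose estimation failed");
        }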