Example #1
 /// <summary>
 /// These functions try to compose the given images (or images stored internally from previous calls) into the final pano, under the assumption that the image transformations were estimated before.
 /// </summary>
 /// <param name="pano">Final pano.</param>
 /// <returns>Status code.</returns>
 public Stitcher.Status ComposePanorama(IOutputArray pano)
 {
     using (OutputArray oaPano = pano.GetOutputArray())
     {
         return(StitchingInvoke.cveStitcherComposePanorama1(_ptr, oaPano));
     }
 }
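ComposePanorama is the second half of the two-phase stitching API: the transforms are estimated first (see EstimateTransform in Example #21), then the panorama is composed. A minimal usage sketch, assuming a Stitcher.Mode-based constructor and the file names shown (both are assumptions about your Emgu CV version):

 // Sketch only. Namespaces assumed: Emgu.CV, Emgu.CV.Stitching, Emgu.CV.Util.
 using (Stitcher stitcher = new Stitcher(Stitcher.Mode.Panorama))
 using (VectorOfMat images = new VectorOfMat())
 using (Mat pano = new Mat())
 {
     images.Push(CvInvoke.Imread("left.jpg"));
     images.Push(CvInvoke.Imread("right.jpg"));

     // Phase 1: match features and estimate the per-camera rotations.
     Stitcher.Status status = stitcher.EstimateTransform(images);
     if (status == Stitcher.Status.Ok)
     {
         // Phase 2: warp, blend and compose the final pano.
         status = stitcher.ComposePanorama(pano);
     }
 }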
Example #2
        /// <summary>
        /// Compute the panoramic images given the images
        /// </summary>
        /// <param name="images">The input images</param>
        /// <returns>The panoramic image</returns>
        public Image <Bgr, Byte> Stitch(Image <Bgr, Byte>[] images)
        {
            IntPtr[] ptrs = new IntPtr[images.Length];
            for (int i = 0; i < images.Length; ++i)
            {
                ptrs[i] = images[i].Ptr;
            }

            // Pin the managed array of image pointers so the native stitcher can read it directly.
            GCHandle handle         = GCHandle.Alloc(ptrs, GCHandleType.Pinned);
            IntPtr   resultIplImage = StitchingInvoke.CvStitcherStitch(_ptr, handle.AddrOfPinnedObject(), images.Length);

            // Release the pin as soon as the native call returns.
            handle.Free();

            if (resultIplImage == IntPtr.Zero)
            {
                throw new ArgumentException("Requires more images");
            }

            // Wrap the returned native IplImage header, copy its data into a managed image,
            // then release the native image.
            MIplImage         tmp    = (MIplImage)Marshal.PtrToStructure(resultIplImage, typeof(MIplImage));
            Image <Bgr, Byte> result = new Image <Bgr, byte>(tmp.width, tmp.height);

            CvInvoke.cvCopy(resultIplImage, result, IntPtr.Zero);
            CvInvoke.cvReleaseImage(ref resultIplImage);
            return(result);
        }
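A short usage sketch for this legacy IplImage-based wrapper; the Stitcher constructor taking a tryUseGpu flag and the file names are assumptions about the older Emgu CV versions this code targets:

 // Sketch only: constructor signature and file names are assumptions.
 Image<Bgr, byte>[] sources =
 {
     new Image<Bgr, byte>("left.jpg"),
     new Image<Bgr, byte>("right.jpg")
 };
 using (Stitcher stitcher = new Stitcher(true))           // true: try to use the GPU
 using (Image<Bgr, byte> pano = stitcher.Stitch(sources)) // throws if the images cannot be stitched
 {
     pano.Save("pano.jpg");
 }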
Example #3
 /// <summary>
 /// Create a new minimum graph cut-based seam estimator.
 /// </summary>
 /// <param name="costFunc">The cost function</param>
 /// <param name="terminalCost">The terminal cost</param>
 /// <param name="badRegionPenalty">Bad region penalty</param>
 public GraphCutSeamFinder(
     CostFunction costFunc  = CostFunction.Color,
     float terminalCost     = 1.0f,
     float badRegionPenalty = 1.0f)
 {
     _ptr = StitchingInvoke.cveGraphCutSeamFinderCreate(costFunc, terminalCost, badRegionPenalty, ref _seamFinderPtr);
 }
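A construction sketch with non-default costs; the fully qualified enum name and the ColorGrad member are assumptions about your Emgu CV version, and the values are purely illustrative:

 // Sketch only: GraphCutSeamFinder.CostFunction.ColorGrad is assumed to exist.
 using (GraphCutSeamFinder seamFinder = new GraphCutSeamFinder(
     GraphCutSeamFinder.CostFunction.ColorGrad, // also penalize gradient differences across the seam
     terminalCost: 10000.0f,
     badRegionPenalty: 1000.0f))                // stronger penalty for seams through poorly matched regions
 {
     // Typically plugged into a stitching pipeline's seam estimation step.
 }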
Example #4
 /// <summary>
 /// Release all the unmanaged memory associated with this exposure compensator
 /// </summary>
 protected override void DisposeObject()
 {
     base.DisposeObject();
     if (_ptr != IntPtr.Zero)
     {
         StitchingInvoke.cveNoExposureCompensatorRelease(ref _ptr);
     }
 }
Example #5
 /// <summary>
 /// Create the GPU version of the SURF features finder; see Example #19 below for the meaning of each parameter.
 /// </summary>
 public SurfFeaturesFinderGpu(
     double hessThresh, int numOctaves, int numLayers,
     int numOctavesDescr, int numLayersDescr)
 {
     _ptr = StitchingInvoke.cveSurfFeaturesFinderGpuCreate(
         hessThresh, numOctaves, numLayers, numOctavesDescr, numLayersDescr,
         ref _featuresFinderPtr);
 }
Example #6
 /// <summary>
 /// Release the unmanaged memory associated with this warper
 /// </summary>
 protected override void DisposeObject()
 {
     base.DisposeObject();
     if (_ptr != IntPtr.Zero)
     {
         StitchingInvoke.cveCylindricalWarperRelease(ref _ptr);
     }
 }
Example #7
 /// <summary>
 /// Release the unmanaged memory associated with this warper
 /// </summary>
 protected override void DisposeObject()
 {
     base.DisposeObject();
     if (_ptr != IntPtr.Zero)
     {
         StitchingInvoke.cvePlaneWarperGpuRelease(ref _ptr);
     }
 }
Example #8
 /// <summary>
 /// Release all the unmanaged memory associated with this bundle adjuster
 /// </summary>
 protected override void DisposeObject()
 {
     base.DisposeObject();
     if (_ptr != IntPtr.Zero)
     {
         StitchingInvoke.cveBundleAdjusterReprojRelease(ref _ptr);
     }
 }
Example #9
 /// <summary>
 /// Release the unmanaged memory associated with this warper
 /// </summary>
 protected override void DisposeObject()
 {
     base.DisposeObject();
     if (_ptr != IntPtr.Zero)
     {
         StitchingInvoke.cveDetailFisheyeWarperRelease(ref _ptr);
     }
 }
Example #10
 /// <summary>
 /// Release all the unmanaged memory associated with this features matcher
 /// </summary>
 protected override void DisposeObject()
 {
     base.DisposeObject();
     if (_ptr != IntPtr.Zero)
     {
         StitchingInvoke.cveBestOf2NearestMatcherRelease(ref _ptr);
     }
 }
Example #11
 /// <summary>
 /// Blends and returns the final pano.
 /// </summary>
 /// <param name="dst">Final pano</param>
 /// <param name="dstMask">Final pano mask</param>
 public void Blend(IInputOutputArray dst, IInputOutputArray dstMask)
 {
     using (InputOutputArray ioaDst = dst.GetInputOutputArray())
         using (InputOutputArray ioaDstMask = dstMask.GetInputOutputArray())
         {
             StitchingInvoke.cveBlenderBlend(_blenderPtr, ioaDst, ioaDstMask);
         }
 }
Example #12
 /// <summary>
 /// Processes the image.
 /// </summary>
 /// <param name="img">Source image</param>
 /// <param name="mask">Source image mask</param>
 /// <param name="tl">Source image top-left corner</param>
 public void Feed(IInputArray img, IInputArray mask, Point tl)
 {
     using (InputArray iaImg = img.GetInputArray())
         using (InputArray iaMask = mask.GetInputArray())
         {
             StitchingInvoke.cveBlenderFeed(_blenderPtr, iaImg, iaMask, ref tl);
         }
 }
Example #13
 /// <summary>
 /// Prepares the blender for blending.
 /// </summary>
 /// <param name="corners">Source images top-left corners</param>
 /// <param name="sizes">Source image sizes</param>
 public void Prepare(Point[] corners, Size[] sizes)
 {
     using (VectorOfPoint vpCorners = new VectorOfPoint(corners))
         using (VectorOfSize vsSizes = new VectorOfSize(sizes))
         {
             StitchingInvoke.cveBlenderPrepare(_blenderPtr, vpCorners, vsSizes);
         }
 }
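Taken together, Examples #11 to #13 describe the blender life cycle: Prepare once with every image's corner and size, Feed each warped image with its mask and corner, then Blend into the final pano. A minimal sketch, assuming a MultiBandBlender with a default constructor and pre-computed warped images, masks, corners and sizes (all of these are assumptions); OpenCV's blenders generally expect 16-bit signed input, so the sketch converts explicitly:

 // Sketch only. Assumed inputs: Mat[] warpedImages, Mat[] warpedMasks, Point[] corners, Size[] sizes.
 using (MultiBandBlender blender = new MultiBandBlender())
 {
     blender.Prepare(corners, sizes);

     for (int i = 0; i < warpedImages.Length; i++)
     {
         using (Mat img16 = new Mat())
         {
             // Blenders typically expect CV_16SC3 input images.
             warpedImages[i].ConvertTo(img16, DepthType.Cv16S);
             blender.Feed(img16, warpedMasks[i], corners[i]);
         }
     }

     using (Mat pano = new Mat())
     using (Mat panoMask = new Mat())
     {
         // The blended pano is 16S here; convert back to 8U before displaying or saving.
         blender.Blend(pano, panoMask);
     }
 }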
Example #14
 /// <summary>
 /// Release all unmanaged resources associated with this exposure compensator
 /// </summary>
 protected override void DisposeObject()
 {
     if (_ptr != IntPtr.Zero)
     {
         StitchingInvoke.cveGainCompensatorRelease(ref _ptr);
     }
     base.DisposeObject();
 }
Example #15
 /// <summary>
 /// Release all the unmanaged memory associated with this estimator
 /// </summary>
 protected override void DisposeObject()
 {
     base.DisposeObject();
     if (_ptr != IntPtr.Zero)
     {
         StitchingInvoke.cveAffineBasedEstimatorRelease(ref _ptr);
     }
 }
Example #16
 /// <summary>
 /// Release all the unmanaged memory associated with this FeaturesFinder
 /// </summary>
 protected override void DisposeObject()
 {
     if (_ptr != IntPtr.Zero)
     {
         StitchingInvoke.cveAKAZEFeaturesFinderRelease(ref _ptr);
         _featuresFinderPtr = IntPtr.Zero;
     }
 }
Example #17
 /// <summary>
 /// Release all the unmanaged memory associated with this seam finder
 /// </summary>
 protected override void DisposeObject()
 {
     base.DisposeObject();
     if (_ptr != IntPtr.Zero)
     {
         StitchingInvoke.cveNoSeamFinderRelease(ref _ptr);
     }
 }
Example #18
 /// <summary>
 /// Release memory associated with this stitcher
 /// </summary>
 protected override void DisposeObject()
 {
     if (_sharedPtr != IntPtr.Zero)
     {
         StitchingInvoke.cveStitcherRelease(ref _sharedPtr);
         _ptr = IntPtr.Zero;
     }
 }
Example #19
 /// <summary>
 /// Create the GPU version of SURF Features finder
 /// </summary>
 /// <param name="hessThresh">
 /// Only features with a keypoint hessian larger than this value are extracted.
 /// A good default value is roughly 300-500 (it can depend on the average local contrast and sharpness of the image).
 /// The user can further filter out some features based on their hessian values and other characteristics.
 /// </param>
 /// <param name="numOctaves">
 /// The number of octaves to be used for extraction.
 /// With each next octave the feature size is doubled
 /// </param>
 /// <param name="numLayers">
 /// The number of layers within each octave
 /// </param>
 /// <param name="numOctavesDescr">The number of octaves used for descriptor extraction</param>
 /// <param name="numLayersDescr">The number of layers within each octave used for descriptor extraction</param>
 public SurfFeaturesFinderGpu(
     double hessThresh   = 300, int numOctaves   = 3, int numLayers = 4,
     int numOctavesDescr = 3, int numLayersDescr = 4)
 {
     _ptr = StitchingInvoke.cveSurfFeaturesFinderGpuCreate(
         hessThresh, numOctaves, numLayers, numOctavesDescr, numLayersDescr,
         ref FeaturesFinderPtr);
 }
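Since every parameter has a default, a custom construction only needs the values being tuned. A brief sketch, assuming a CUDA-enabled Emgu CV build (the GPU SURF finder is not available otherwise) and an illustrative threshold:

 // Sketch only: requires a CUDA-enabled build; the threshold value is illustrative.
 using (SurfFeaturesFinderGpu surfGpu = new SurfFeaturesFinderGpu(hessThresh: 500))
 {
     // A higher hessian threshold keeps fewer, more distinctive keypoints.
 }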
Example #20
 /// <summary>
 /// Release all unmanaged resources associated with this blender
 /// </summary>
 protected override void DisposeObject()
 {
     if (_ptr != IntPtr.Zero)
     {
         StitchingInvoke.cveMultiBandBlenderRelease(ref _ptr);
     }
     base.DisposeObject();
 }
Example #21
 /// <summary>
 /// These functions try to match the given images and to estimate rotations of each camera.
 /// </summary>
 /// <param name="images">Input images.</param>
 /// <param name="masks">Masks for each input image specifying where to look for keypoints (optional).</param>
 /// <returns>Status code.</returns>
 public Stitcher.Status EstimateTransform(IInputArrayOfArrays images, IInputArrayOfArrays masks = null)
 {
     using (InputArray iaImages = images.GetInputArray())
         using (InputArray iaMasks = masks == null ? InputArray.GetEmpty() : masks.GetInputArray())
         {
             return(StitchingInvoke.cveStitcherEstimateTransform(_ptr, iaImages, iaMasks));
         }
 }
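The optional masks restrict where keypoints are searched, one 8-bit single-channel mask per input image. A sketch of building such masks, assuming an already constructed stitcher, a VectorOfMat named images, and an illustrative region of interest:

 // Sketch only. Assumed: 'stitcher' (Stitcher) and 'images' (VectorOfMat) already exist.
 // Namespaces assumed: Emgu.CV, Emgu.CV.CvEnum, Emgu.CV.Structure, Emgu.CV.Util, System.Drawing.
 using (VectorOfMat masks = new VectorOfMat())
 {
     for (int i = 0; i < images.Size; i++)
     {
         Mat mask = new Mat(images[i].Size, DepthType.Cv8U, 1);
         mask.SetTo(new MCvScalar(0));
         // Only look for keypoints in the central horizontal band of each image (illustrative ROI).
         Rectangle roi = new Rectangle(0, images[i].Rows / 4, images[i].Cols, images[i].Rows / 2);
         CvInvoke.Rectangle(mask, roi, new MCvScalar(255), -1);
         masks.Push(mask);
     }

     Stitcher.Status status = stitcher.EstimateTransform(images, masks);
 }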
Example #22
 /// <summary>
 /// Release the unmanaged memory associated with this warper
 /// </summary>
 protected override void DisposeObject()
 {
     base.DisposeObject();
     if (_ptr != IntPtr.Zero)
     {
         StitchingInvoke.cveTransverseMercatorWarperRelease(ref _ptr);
     }
 }
Example #23
 /// <summary>
 /// Release the unmanaged memory associated with this warper
 /// </summary>
 protected override void DisposeObject()
 {
     base.DisposeObject();
     if (_ptr != IntPtr.Zero)
     {
         StitchingInvoke.cvePaniniPortraitWarperRelease(ref _ptr);
     }
 }
Example #24
 /// <summary>
 /// Release the unmanaged memory associated with this warper
 /// </summary>
 protected override void DisposeObject()
 {
     base.DisposeObject();
     if (_ptr != IntPtr.Zero)
     {
         StitchingInvoke.cveCompressedRectilinearWarperRelease(ref _ptr);
     }
 }
Example #25
 /// <summary>
 /// Release the unmanaged memory associated with this warper
 /// </summary>
 protected override void DisposeObject()
 {
     base.DisposeObject();
     if (_ptr != IntPtr.Zero)
     {
         StitchingInvoke.cveStereographicWarperRelease(ref _ptr);
     }
 }
Example #26
 /// <summary>
 /// Create a new features matcher
 /// </summary>
 /// <param name="fullAffine">If true, estimate a full affine transformation with 6 degrees of freedom; otherwise estimate a partial affine transformation (translation, rotation and uniform scale) with 4 degrees of freedom</param>
 /// <param name="tryUseGpu">If true, will try to use the GPU</param>
 /// <param name="matchConf">Match confidence threshold for the feature matching step</param>
 /// <param name="numMatchesThresh1">Minimum number of matches required for the pairwise transform estimation</param>
 public AffineBestOf2NearestMatcher(
     bool fullAffine       = false,
     bool tryUseGpu        = false,
     float matchConf       = 0.3f,
     int numMatchesThresh1 = 6)
 {
     _ptr = StitchingInvoke.cveAffineBestOf2NearestMatcherCreate(
         fullAffine,
         tryUseGpu,
         matchConf,
         numMatchesThresh1,
         ref _featuresMatcherPtr);
 }
Example #27
 /// <summary>
 /// Create a new features matcher
 /// </summary>
 /// <param name="tryUseGpu">If true, will try to use gpu.</param>
 /// <param name="matchConf">Match confidence threshold for the feature matching step</param>
 /// <param name="numMatchesThresh1">Minimum number of matches required for the pairwise transform estimation</param>
 /// <param name="numMatchesThresh2">Minimum number of matches required for re-estimating the transform on inliers</param>
 public BestOf2NearestMatcher(
     bool tryUseGpu        = false,
     float matchConf       = 0.3f,
     int numMatchesThresh1 = 6,
     int numMatchesThresh2 = 6)
 {
     _ptr = StitchingInvoke.cveBestOf2NearestMatcherCreate(
         tryUseGpu,
         matchConf,
         numMatchesThresh1,
         numMatchesThresh2,
         ref _featuresMatcherPtr);
 }
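Following OpenCV's documentation for BestOf2NearestMatcher, matchConf is the confidence threshold used when filtering feature matches, numMatchesThresh1 is the minimum number of matches required to estimate the pairwise transform, and numMatchesThresh2 the minimum required to re-estimate it on inliers. A construction sketch with a stricter match confidence (the values are illustrative):

 // Sketch only: values are illustrative.
 using (BestOf2NearestMatcher matcher = new BestOf2NearestMatcher(
     tryUseGpu: false,
     matchConf: 0.4f,          // stricter confidence -> fewer, more reliable matches
     numMatchesThresh1: 6,
     numMatchesThresh2: 6))
 {
     // Typically handed to the stitching pipeline's pairwise matching step.
 }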
Example #28
        /// <summary>
        /// Projects the image.
        /// </summary>
        /// <param name="src">Source image</param>
        /// <param name="K">Camera intrinsic parameters</param>
        /// <param name="R">Camera rotation matrix</param>
        /// <param name="interpMode">Interpolation mode</param>
        /// <param name="borderMode">Border extrapolation mode</param>
        /// <param name="dst">Projected image</param>
        /// <returns>Projected image top-left corner</returns>
        public Point Warp(IInputArray src, IInputArray K, IInputArray R, CvEnum.Inter interpMode, CvEnum.BorderType borderMode, IOutputArray dst)
        {
            Point corner = new Point();

            using (InputArray iaSrc = src.GetInputArray())
                using (InputArray iaK = K.GetInputArray())
                    using (InputArray iaR = R.GetInputArray())
                        using (OutputArray oaDst = dst.GetOutputArray())
                        {
                            StitchingInvoke.cveRotationWarperWarp(_rotationWarper, iaSrc, iaK, iaR, interpMode, borderMode, oaDst, ref corner);
                            return(corner);
                        }
        }
Example #29
        /// <summary>
        /// Builds the projection maps according to the given camera data.
        /// </summary>
        /// <param name="srcSize">Source image size</param>
        /// <param name="K">Camera intrinsic parameters</param>
        /// <param name="R">Camera rotation matrix</param>
        /// <param name="xmap">Projection map for the x axis</param>
        /// <param name="ymap">Projection map for the y axis</param>
        /// <returns>Projected image minimum bounding box</returns>
        public Rectangle BuildMaps(Size srcSize, IInputArray K, IInputArray R, IOutputArray xmap, IOutputArray ymap)
        {
            Rectangle result = new Rectangle();

            using (InputArray iaK = K.GetInputArray())
                using (InputArray iaR = R.GetInputArray())
                    using (OutputArray oaXmap = xmap.GetOutputArray())
                        using (OutputArray oaYmap = ymap.GetOutputArray())
                        {
                            StitchingInvoke.cveRotationWarperBuildMaps(_rotationWarper, ref srcSize, iaK, iaR, oaXmap, oaYmap, ref result);
                            return(result);
                        }
        }
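BuildMaps is useful when the same projection is applied repeatedly (for example to video frames): the maps are computed once and reused with CvInvoke.Remap, whereas Warp in Example #28 projects in a single call. A sketch, assuming a CylindricalWarper that derives from this rotation-warper wrapper and takes a scale in its constructor; the class name, the constructor, the focal length, and the variables src (the image to warp) and srcSize (its size) are all assumptions:

 // Sketch only. Assumed: 'src' (Mat) and 'srcSize' (Size) already exist.
 float focal = 800f;
 using (Matrix<float> K = new Matrix<float>(3, 3))  // camera intrinsics (CV_32F)
 using (Matrix<float> R = new Matrix<float>(3, 3))  // rotation matrix (CV_32F)
 using (Mat xmap = new Mat())
 using (Mat ymap = new Mat())
 using (CylindricalWarper warper = new CylindricalWarper(focal))
 {
     K[0, 0] = focal; K[1, 1] = focal; K[2, 2] = 1f;
     K[0, 2] = srcSize.Width / 2f; K[1, 2] = srcSize.Height / 2f;
     R[0, 0] = 1f; R[1, 1] = 1f; R[2, 2] = 1f;      // identity rotation

     // dstRoi is the bounding box of the projected image.
     Rectangle dstRoi = warper.BuildMaps(srcSize, K, R, xmap, ymap);

     using (Mat warped = new Mat())
     {
         // Reuse xmap/ymap for every frame that shares the same K and R.
         CvInvoke.Remap(src, warped, xmap, ymap, Inter.Linear, BorderType.Constant);
     }
 }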
Example #30
 /// <summary>
 /// Creates an AKAZE features finder
 /// </summary>
 /// <param name="descriptorType">Type of the extracted descriptor</param>
 /// <param name="descriptorSize">Size of the descriptor in bits. 0 -> Full size</param>
 /// <param name="descriptorChannels">Number of channels in the descriptor (1, 2, 3)</param>
 /// <param name="threshold">Detector response threshold to accept point</param>
 /// <param name="nOctaveLayers"> Default number of sublevels per scale level</param>
 /// <param name="nOctaves">Maximum octave evolution of the image</param>
 /// <param name="diffusivity">Diffusivity type</param>
 public AKAZEFeaturesFinder(
     AKAZE.DescriptorType descriptorType,
     int descriptorSize,
     int descriptorChannels,
     float threshold,
     int nOctaves,
     int nOctaveLayers,
     KAZE.Diffusivity diffusivity)
 {
     _ptr = StitchingInvoke.cveAKAZEFeaturesFinderCreate(
         descriptorType, descriptorSize, descriptorChannels, threshold, nOctaves, nOctaveLayers, diffusivity,
         ref _featuresFinderPtr);
 }
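A construction sketch using values close to the AKAZE defaults; the enum members (AKAZE.DescriptorType.Mldb, KAZE.Diffusivity.PmG2) are assumed to match your Emgu CV version:

 // Sketch only: enum member names and the chosen values mirror common AKAZE defaults.
 using (AKAZEFeaturesFinder akaze = new AKAZEFeaturesFinder(
     AKAZE.DescriptorType.Mldb, // binary MLDB descriptor
     0,                         // 0 -> full descriptor size
     3,                         // descriptor channels
     0.001f,                    // detector response threshold
     4,                         // octaves
     4,                         // layers per octave
     KAZE.Diffusivity.PmG2))
 {
     // Typically passed to the stitching pipeline as its features finder.
 }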