/// <summary>
/// Compose the final panorama from the images whose transforms have already
/// been estimated (e.g. via a prior EstimateTransform call).
/// </summary>
/// <param name="pano">Final pano.</param>
/// <returns>Status code reported by the native stitcher.</returns>
public Stitcher.Status ComposePanorama(IOutputArray pano)
{
    using (OutputArray oaPano = pano.GetOutputArray())
    {
        Stitcher.Status status = StitchingInvoke.cveStitcherComposePanorama1(_ptr, oaPano);
        return status;
    }
}
/// <summary>
/// Compute the panoramic image given the images.
/// </summary>
/// <param name="images">The input images</param>
/// <returns>The panoramic image</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="images"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when the native stitcher fails to produce a result (e.g. too few images).</exception>
public Image<Bgr, Byte> Stitch(Image<Bgr, Byte>[] images)
{
    if (images == null)
        throw new ArgumentNullException(nameof(images));

    // Collect the native image pointers and pin the array so the native
    // stitcher can read it without the GC relocating it.
    IntPtr[] ptrs = new IntPtr[images.Length];
    for (int i = 0; i < images.Length; ++i)
    {
        ptrs[i] = images[i].Ptr;
    }

    GCHandle handle = GCHandle.Alloc(ptrs, GCHandleType.Pinned);
    IntPtr resultIplImage;
    try
    {
        resultIplImage = StitchingInvoke.CvStitcherStitch(_ptr, handle.AddrOfPinnedObject(), images.Length);
    }
    finally
    {
        // Fix: free the pinned handle even when the native call throws;
        // the original leaked the pin on exception.
        handle.Free();
    }

    if (resultIplImage == IntPtr.Zero)
        throw new ArgumentException("Requires more images");

    try
    {
        // Copy the native result into a managed Image before releasing it.
        MIplImage tmp = (MIplImage)Marshal.PtrToStructure(resultIplImage, typeof(MIplImage));
        Image<Bgr, Byte> result = new Image<Bgr, byte>(tmp.width, tmp.height);
        CvInvoke.cvCopy(resultIplImage, result, IntPtr.Zero);
        return result;
    }
    finally
    {
        // Fix: release the native IplImage even when the copy throws;
        // the original leaked it on exception.
        CvInvoke.cvReleaseImage(ref resultIplImage);
    }
}
/// <summary>
/// Create a new minimum graph cut-based seam estimator.
/// </summary>
/// <param name="costFunc">The cost function</param>
/// <param name="terminalCost">The terminal cost</param>
/// <param name="badRegionPenalty">Bad Region penalty</param>
public GraphCutSeamFinder(
    CostFunction costFunc = CostFunction.Color,
    float terminalCost = 1.0f,
    float badRegionPenalty = 1.0f)
{
    // The native factory also fills _seamFinderPtr with the base-class pointer.
    _ptr = StitchingInvoke.cveGraphCutSeamFinderCreate(
        costFunc,
        terminalCost,
        badRegionPenalty,
        ref _seamFinderPtr);
}
/// <summary>
/// Release all the unmanaged memory associated with this exposure compensator.
/// </summary>
protected override void DisposeObject()
{
    base.DisposeObject();
    if (IntPtr.Zero == _ptr)
    {
        return; // already released
    }
    StitchingInvoke.cveNoExposureCompensatorRelease(ref _ptr);
}
/// <summary>
/// Create the GPU version of the SURF features finder.
/// </summary>
/// <param name="hessThresh">Hessian threshold: only keypoints with a larger hessian are extracted.</param>
/// <param name="numOctaves">Number of octaves used for extraction.</param>
/// <param name="numLayers">Number of layers within each octave.</param>
/// <param name="numOctavesDescr">Number of octaves for the descriptors.</param>
/// <param name="numLayersDescr">Number of layers for the descriptors.</param>
public SurfFeaturesFinderGpu(
    double hessThresh,
    int numOctaves,
    int numLayers,
    int numOctavesDescr,
    int numLayersDescr)
{
    // The native factory also fills _featuresFinderPtr with the base-class pointer.
    _ptr = StitchingInvoke.cveSurfFeaturesFinderGpuCreate(
        hessThresh,
        numOctaves,
        numLayers,
        numOctavesDescr,
        numLayersDescr,
        ref _featuresFinderPtr);
}
/// <summary>
/// Release the unmanaged memory associated with this warper.
/// </summary>
protected override void DisposeObject()
{
    base.DisposeObject();
    if (IntPtr.Zero == _ptr)
    {
        return; // already released
    }
    StitchingInvoke.cveCylindricalWarperRelease(ref _ptr);
}
/// <summary>
/// Release the unmanaged memory associated with this warper.
/// </summary>
protected override void DisposeObject()
{
    base.DisposeObject();
    if (IntPtr.Zero == _ptr)
    {
        return; // already released
    }
    StitchingInvoke.cvePlaneWarperGpuRelease(ref _ptr);
}
/// <summary>
/// Release all the unmanaged memory associated with this bundle adjuster.
/// </summary>
protected override void DisposeObject()
{
    base.DisposeObject();
    if (IntPtr.Zero == _ptr)
    {
        return; // already released
    }
    StitchingInvoke.cveBundleAdjusterReprojRelease(ref _ptr);
}
/// <summary>
/// Release the unmanaged memory associated with this warper.
/// </summary>
protected override void DisposeObject()
{
    base.DisposeObject();
    if (IntPtr.Zero == _ptr)
    {
        return; // already released
    }
    StitchingInvoke.cveDetailFisheyeWarperRelease(ref _ptr);
}
/// <summary>
/// Release all the unmanaged memory associated with this matcher.
/// </summary>
protected override void DisposeObject()
{
    base.DisposeObject();
    if (IntPtr.Zero == _ptr)
    {
        return; // already released
    }
    StitchingInvoke.cveBestOf2NearestMatcherRelease(ref _ptr);
}
/// <summary>
/// Blends and returns the final pano.
/// </summary>
/// <param name="dst">Final pano</param>
/// <param name="dstMask">Final pano mask</param>
public void Blend(IInputOutputArray dst, IInputOutputArray dstMask)
{
    using (InputOutputArray ioaDst = dst.GetInputOutputArray())
    {
        using (InputOutputArray ioaDstMask = dstMask.GetInputOutputArray())
        {
            StitchingInvoke.cveBlenderBlend(_blenderPtr, ioaDst, ioaDstMask);
        }
    }
}
/// <summary>
/// Processes the image.
/// </summary>
/// <param name="img">Source image</param>
/// <param name="mask">Source image mask</param>
/// <param name="tl">Source image top-left corners</param>
public void Feed(IInputArray img, IInputArray mask, Point tl)
{
    using (InputArray iaImg = img.GetInputArray())
    {
        using (InputArray iaMask = mask.GetInputArray())
        {
            StitchingInvoke.cveBlenderFeed(_blenderPtr, iaImg, iaMask, ref tl);
        }
    }
}
/// <summary>
/// Prepares the blender for blending.
/// </summary>
/// <param name="corners">Source images top-left corners</param>
/// <param name="sizes">Source image sizes</param>
public void Prepare(Point[] corners, Size[] sizes)
{
    // Marshal the managed arrays into native vectors for the interop call.
    using (VectorOfPoint vpCorners = new VectorOfPoint(corners))
    {
        using (VectorOfSize vsSizes = new VectorOfSize(sizes))
        {
            StitchingInvoke.cveBlenderPrepare(_blenderPtr, vpCorners, vsSizes);
        }
    }
}
/// <summary>
/// Release all unmanaged resources associated with this exposure compensator.
/// </summary>
protected override void DisposeObject()
{
    // Release the native object first, then let the base class clean up.
    if (IntPtr.Zero != _ptr)
    {
        StitchingInvoke.cveGainCompensatorRelease(ref _ptr);
    }
    base.DisposeObject();
}
/// <summary>
/// Release all the unmanaged memory associated with this estimator.
/// </summary>
protected override void DisposeObject()
{
    base.DisposeObject();
    if (IntPtr.Zero == _ptr)
    {
        return; // already released
    }
    StitchingInvoke.cveAffineBasedEstimatorRelease(ref _ptr);
}
/// <summary>
/// Release all the unmanaged memory associated with this FeaturesFinder.
/// </summary>
/// <remarks>
/// NOTE(review): unlike most other DisposeObject overrides in this file, this one does
/// not call base.DisposeObject() — confirm whether that is intentional or an omission.
/// It does clear the base-class feature-finder pointer explicitly after releasing
/// the native AKAZE finder.
/// </remarks>
protected override void DisposeObject() { if (_ptr != IntPtr.Zero) { StitchingInvoke.cveAKAZEFeaturesFinderRelease(ref _ptr); _featuresFinderPtr = IntPtr.Zero; } }
/// <summary>
/// Release all the unmanaged memory associated with this seam finder.
/// </summary>
protected override void DisposeObject()
{
    base.DisposeObject();
    if (IntPtr.Zero == _ptr)
    {
        return; // already released
    }
    StitchingInvoke.cveNoSeamFinderRelease(ref _ptr);
}
/// <summary>
/// Release memory associated with this stitcher.
/// </summary>
protected override void DisposeObject()
{
    // Fix: the original guard was `_sharedPtr == IntPtr.Zero`, which released
    // only when the pointer was already null — i.e. the native stitcher was
    // never freed for a live object (memory leak) and the release was invoked
    // on a null handle. Release when the pointer is non-null instead.
    if (_sharedPtr != IntPtr.Zero)
    {
        StitchingInvoke.cveStitcherRelease(ref _sharedPtr);
        _ptr = IntPtr.Zero;
    }
}
/// <summary>
/// Create the GPU version of SURF Features finder.
/// </summary>
/// <param name="hessThresh">
/// Only features with keypoint.hessian larger than this value are extracted.
/// A good default value is ~300-500 (depends on the average local contrast and
/// sharpness of the image); users can further filter features by their hessian
/// values and other characteristics.
/// </param>
/// <param name="numOctaves">
/// The number of octaves to be used for extraction;
/// with each next octave the feature size is doubled.
/// </param>
/// <param name="numLayers">The number of layers within each octave.</param>
/// <param name="numOctavesDescr">The number of Octaves descriptors.</param>
/// <param name="numLayersDescr">The number of Layers descriptors.</param>
public SurfFeaturesFinderGpu(
    double hessThresh = 300,
    int numOctaves = 3,
    int numLayers = 4,
    int numOctavesDescr = 3,
    int numLayersDescr = 4)
{
    // The native factory also fills FeaturesFinderPtr with the base-class pointer.
    _ptr = StitchingInvoke.cveSurfFeaturesFinderGpuCreate(
        hessThresh, numOctaves, numLayers, numOctavesDescr, numLayersDescr,
        ref FeaturesFinderPtr);
}
/// <summary>
/// Release all unmanaged resources associated with this blender.
/// </summary>
protected override void DisposeObject()
{
    // Release the native blender first, then let the base class clean up.
    if (IntPtr.Zero != _ptr)
    {
        StitchingInvoke.cveMultiBandBlenderRelease(ref _ptr);
    }
    base.DisposeObject();
}
/// <summary>
/// These functions try to match the given images and to estimate rotations of each camera.
/// </summary>
/// <param name="images">Input images.</param>
/// <param name="masks">Masks for each input image specifying where to look for keypoints (optional).</param>
/// <returns>Status code.</returns>
public Stitcher.Status EstimateTransform(IInputArrayOfArrays images, IInputArrayOfArrays masks = null)
{
    using (InputArray iaImages = images.GetInputArray())
    {
        // A null mask collection maps to an empty InputArray on the native side.
        InputArray iaMasks = (masks == null) ? InputArray.GetEmpty() : masks.GetInputArray();
        using (iaMasks)
        {
            Stitcher.Status status = StitchingInvoke.cveStitcherEstimateTransform(_ptr, iaImages, iaMasks);
            return status;
        }
    }
}
/// <summary>
/// Release the unmanaged memory associated with this warper.
/// </summary>
protected override void DisposeObject()
{
    base.DisposeObject();
    if (IntPtr.Zero == _ptr)
    {
        return; // already released
    }
    StitchingInvoke.cveTransverseMercatorWarperRelease(ref _ptr);
}
/// <summary>
/// Release the unmanaged memory associated with this warper.
/// </summary>
protected override void DisposeObject()
{
    base.DisposeObject();
    if (IntPtr.Zero == _ptr)
    {
        return; // already released
    }
    StitchingInvoke.cvePaniniPortraitWarperRelease(ref _ptr);
}
/// <summary>
/// Release the unmanaged memory associated with this warper.
/// </summary>
protected override void DisposeObject()
{
    base.DisposeObject();
    if (IntPtr.Zero == _ptr)
    {
        return; // already released
    }
    StitchingInvoke.cveCompressedRectilinearWarperRelease(ref _ptr);
}
/// <summary>
/// Release the unmanaged memory associated with this warper.
/// </summary>
protected override void DisposeObject()
{
    base.DisposeObject();
    if (IntPtr.Zero == _ptr)
    {
        return; // already released
    }
    StitchingInvoke.cveStereographicWarperRelease(ref _ptr);
}
/// <summary>
/// Create a new features matcher.
/// </summary>
/// <param name="fullAffine">Full Affine</param>
/// <param name="tryUseGpu">If true, will try to use gpu</param>
/// <param name="matchConf">Match confident</param>
/// <param name="numMatchesThresh1">Number of matches threshold</param>
public AffineBestOf2NearestMatcher(
    bool fullAffine = false,
    bool tryUseGpu = false,
    float matchConf = 0.3f,
    int numMatchesThresh1 = 6)
{
    // The native factory also fills _featuresMatcherPtr with the base-class pointer.
    _ptr = StitchingInvoke.cveAffineBestOf2NearestMatcherCreate(
        fullAffine, tryUseGpu, matchConf, numMatchesThresh1,
        ref _featuresMatcherPtr);
}
/// <summary>
/// Create a new features matcher.
/// </summary>
/// <param name="tryUseGpu">If true, will try to use gpu.</param>
/// <param name="matchConf">Match confident</param>
/// <param name="numMatchesThresh1">Number of matches threshold</param>
/// <param name="numMatchesThresh2">Number of matches threshold</param>
public BestOf2NearestMatcher(
    bool tryUseGpu = false,
    float matchConf = 0.3f,
    int numMatchesThresh1 = 6,
    int numMatchesThresh2 = 6)
{
    // The native factory also fills _featuresMatcherPtr with the base-class pointer.
    _ptr = StitchingInvoke.cveBestOf2NearestMatcherCreate(
        tryUseGpu, matchConf, numMatchesThresh1, numMatchesThresh2,
        ref _featuresMatcherPtr);
}
/// <summary>
/// Projects the image.
/// </summary>
/// <param name="src">Source image</param>
/// <param name="K">Camera intrinsic parameters</param>
/// <param name="R">Camera rotation matrix</param>
/// <param name="interpMode">Interpolation mode</param>
/// <param name="borderMode">Border extrapolation mode</param>
/// <param name="dst">Projected image</param>
/// <returns>Project image top-left corner</returns>
public Point Warp(IInputArray src, IInputArray K, IInputArray R, CvEnum.Inter interpMode, CvEnum.BorderType borderMode, IOutputArray dst)
{
    // The native call writes the projected image's top-left corner here.
    Point topLeft = new Point();
    using (InputArray iaSrc = src.GetInputArray())
    using (InputArray iaK = K.GetInputArray())
    using (InputArray iaR = R.GetInputArray())
    using (OutputArray oaDst = dst.GetOutputArray())
    {
        StitchingInvoke.cveRotationWarperWarp(
            _rotationWarper, iaSrc, iaK, iaR, interpMode, borderMode, oaDst, ref topLeft);
    }
    return topLeft;
}
/// <summary>
/// Builds the projection maps according to the given camera data.
/// </summary>
/// <param name="srcSize">Source image size</param>
/// <param name="K">Camera intrinsic parameters</param>
/// <param name="R">Camera rotation matrix</param>
/// <param name="xmap">Projection map for the x axis</param>
/// <param name="ymap">Projection map for the y axis</param>
/// <returns>Projected image minimum bounding box</returns>
public Rectangle BuildMaps(Size srcSize, IInputArray K, IInputArray R, IOutputArray xmap, IOutputArray ymap)
{
    // The native call writes the bounding box here.
    Rectangle boundingBox = new Rectangle();
    using (InputArray iaK = K.GetInputArray())
    using (InputArray iaR = R.GetInputArray())
    using (OutputArray oaXmap = xmap.GetOutputArray())
    using (OutputArray oaYmap = ymap.GetOutputArray())
    {
        StitchingInvoke.cveRotationWarperBuildMaps(
            _rotationWarper, ref srcSize, iaK, iaR, oaXmap, oaYmap, ref boundingBox);
    }
    return boundingBox;
}
/// <summary>
/// Creates an AKAZE features finder.
/// </summary>
/// <param name="descriptorType">Type of the extracted descriptor</param>
/// <param name="descriptorSize">Size of the descriptor in bits. 0 -> Full size</param>
/// <param name="descriptorChannels">Number of channels in the descriptor (1, 2, 3)</param>
/// <param name="threshold">Detector response threshold to accept point</param>
/// <param name="nOctaves">Maximum octave evolution of the image</param>
/// <param name="nOctaveLayers">Default number of sublevels per scale level</param>
/// <param name="diffusivity">Diffusivity type</param>
public AKAZEFeaturesFinder(
    AKAZE.DescriptorType descriptorType,
    int descriptorSize,
    int descriptorChannels,
    float threshold,
    int nOctaves,
    int nOctaveLayers,
    KAZE.Diffusivity diffusivity)
{
    // The native factory also fills _featuresFinderPtr with the base-class pointer.
    _ptr = StitchingInvoke.cveAKAZEFeaturesFinderCreate(
        descriptorType,
        descriptorSize,
        descriptorChannels,
        threshold,
        nOctaves,
        nOctaveLayers,
        diffusivity,
        ref _featuresFinderPtr);
}