/// <summary>
/// Release the unmanaged memory associated with this two pass stabilizer.
/// </summary>
protected override void DisposeObject()
{
    // Guard against double-dispose: only release a live native handle
    // (consistent with the other DisposeObject overrides in this file).
    if (_ptr != IntPtr.Zero)
    {
        VideoStabInvoke.TwoPassStabilizerRelease(ref _ptr);
    }
    // The stabilizer-base pointer aliases native memory owned by _ptr; just clear it.
    _stabilizerBase = IntPtr.Zero;
    base.Dispose();
}
/// <summary>
/// Retrieve the next frame from the FrameSource.
/// </summary>
/// <returns>The next frame retrieved from the underlying native frame source.</returns>
public Mat NextFrame()
{
    // The native call fills the freshly allocated Mat in place.
    Mat result = new Mat();
    VideoStabInvoke.VideostabFrameSourceGetNextFrame(FrameSourcePtr, result);
    return result;
}
/// <summary>
/// Release the unmanaged memory associated with the stabilizer.
/// </summary>
protected override void DisposeObject()
{
    // Only release a live native handle; the native call zeroes _ptr via ref.
    if (_ptr != IntPtr.Zero)
    {
        VideoStabInvoke.cveOnePassStabilizerRelease(ref _ptr);
    }
    // _stabilizerBase aliases memory owned by _ptr; clearing it is sufficient.
    _stabilizerBase = IntPtr.Zero;
    base.Dispose();
}
/// <summary>
/// Release the unmanaged memory associated with this CaptureFrameSource.
/// </summary>
protected override void DisposeObject()
{
    // Skip the native release when the handle was never created or is already freed.
    if (IntPtr.Zero != _ptr)
    {
        VideoStabInvoke.cveVideostabCaptureFrameSourceRelease(ref _ptr);
    }
    // The frame-source pointer aliases native memory owned by _ptr; just clear it.
    FrameSourcePtr = IntPtr.Zero;
    base.DisposeObject();
}
/// <summary>
/// Create a two pass video stabilizer.
/// </summary>
/// <param name="baseFrameSource">The capture object to be stabilized. Should not be a camera stream.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="baseFrameSource"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when the frame source is a live camera stream.</exception>
public TwoPassStabilizer(FrameSource baseFrameSource)
{
    if (baseFrameSource == null)
    {
        throw new ArgumentNullException("baseFrameSource");
    }
    // A two pass stabilizer reads the stream more than once; a live camera cannot be rewound.
    if (baseFrameSource.CaptureSource == Capture.CaptureModuleType.Camera)
    {
        throw new ArgumentException("Two pass stabilizer cannot process camera stream");
    }
    _baseFrameSource = baseFrameSource;
    _ptr = VideoStabInvoke.TwoPassStabilizerCreate(_baseFrameSource, ref _stabilizerBase, ref FrameSourcePtr);
}
/// <summary>
/// Retrieve the next frame from the FrameSource.
/// </summary>
/// <returns>The next frame. If no more frames, null will be returned.</returns>
public Mat NextFrame()
{
    Mat nextFrame = new Mat();
    bool grabbed = VideoStabInvoke.cveVideostabFrameSourceGetNextFrame(FrameSourcePtr, nextFrame);
    if (!grabbed)
    {
        // End of stream: release the unused buffer and signal exhaustion with null.
        nextFrame.Dispose();
        return null;
    }
    return nextFrame;
}
/// <summary>
/// Retrieve the next frame from the FrameSource.
/// </summary>
/// <returns>The next frame as a BGR image, or null when no more frames are available.</returns>
public Image <Bgr, Byte> NextFrame()
{
    // Ask the native frame source for the next frame; on failure (or a null buffer)
    // signal end-of-stream with null.
    if (!VideoStabInvoke.FrameSourceGetNextFrame(_frameSourcePtr, ref _frameBuffer) || _frameBuffer == IntPtr.Zero)
    {
        return(null);
    }

    // Marshal the native IplImage header so we can inspect channel count and geometry.
    MIplImage iplImage = (MIplImage)Marshal.PtrToStructure(_frameBuffer, typeof(MIplImage));

    Image <Bgr, Byte> res;
    if (iplImage.nChannels == 1)
    {
        //if the image captured is Grayscale, convert it to BGR
        res = new Image <Bgr, Byte>(iplImage.width, iplImage.height);
        CvInvoke.cvCvtColor(_frameBuffer, res.Ptr, Emgu.CV.CvEnum.COLOR_CONVERSION.CV_GRAY2BGR);
    }
    else
    {
        // Multi-channel frame: wrap the native buffer directly (no pixel copy).
        // NOTE(review): the returned image appears to share memory with _frameBuffer,
        // so it presumably must be consumed before the next NextFrame call — confirm with callers.
        res = new Image <Bgr, byte>(iplImage.width, iplImage.height, iplImage.widthStep, iplImage.imageData);
    }
    return(res);
}
/// <summary>
/// Create a Capture frame source.
/// </summary>
/// <param name="capture">The capture object that will be converted to a FrameSource</param>
public CaptureFrameSource(Capture capture)
{
    // Record where the frames originate, then wrap the capture in a native frame source.
    CaptureSource = capture.CaptureSource;
    _ptr = VideoStabInvoke.VideostabCaptureFrameSourceCreate(capture, ref FrameSourcePtr);
}
/// <summary>
/// Release all the unmanaged memory associated with this object.
/// </summary>
protected override void DisposeObject()
{
    // Guard against double-dispose, consistent with the other DisposeObject
    // overrides in this file; the native call zeroes _ptr via ref.
    if (_ptr != IntPtr.Zero)
    {
        VideoStabInvoke.cveGaussianMotionFilterRelease(ref _ptr);
    }
}
/// <summary>
/// Create a Gaussian motion filter.
/// </summary>
/// <param name="radius">The radius</param>
/// <param name="stdev">The standard deviation</param>
public GaussianMotionFilter(int radius = 15, float stdev = -1.0f)
{
    // Construct the native filter; ownership of the handle rests with this wrapper.
    _ptr = VideoStabInvoke.cveGaussianMotionFilterCreate(radius, stdev);
}
/// <summary>
/// Set the Motion Filter.
/// </summary>
/// <param name="motionFilter">The motion filter</param>
public void SetMotionFilter(GaussianMotionFilter motionFilter)
{
    // Forward the filter to the native one pass stabilizer.
    VideoStabInvoke.cveOnePassStabilizerSetMotionFilter(_ptr, motionFilter);
}
/// <summary>
/// Create a Capture frame source.
/// </summary>
/// <param name="capture">The capture object that will be converted to a FrameSource</param>
public CaptureFrameSource(Capture capture)
{
    // The frame-source handle and the owning handle are one and the same here.
    _frameSourcePtr = _ptr = VideoStabInvoke.CaptureFrameSourceCreate(capture);
}
/// <summary>
/// Create a Gaussian motion filter.
/// </summary>
/// <param name="radius">The radius, use 15 for default.</param>
/// <param name="stdev">The standard deviation, use -1.0f for default</param>
public GaussianMotionFilter(int radius, float stdev)
{
    // Construct the native filter; the handle is released in DisposeObject.
    _ptr = VideoStabInvoke.GaussianMotionFilterCreate(radius, stdev);
}
/// <summary>
/// Retrieve the next frame from the FrameSource.
/// </summary>
/// <param name="frame">The next frame</param>
/// <returns>True if there are more frames</returns>
public bool NextFrame(Mat frame)
{
    bool grabbed = VideoStabInvoke.cveVideostabFrameSourceGetNextFrame(FrameSourcePtr, frame);
    return grabbed;
}
/// <summary>
/// Create a Gaussian motion filter using the native default parameters.
/// </summary>
public GaussianMotionFilter()
{
    // The parameterless native constructor supplies its own defaults.
    _ptr = VideoStabInvoke.GaussianMotionFilterCreate();
}
/// <summary>
/// Release the unmanaged memory associated with this CaptureFrameSource.
/// </summary>
protected override void DisposeObject()
{
    // Guard against double-dispose, consistent with the other DisposeObject
    // overrides in this file; the native call zeroes _ptr via ref.
    if (_ptr != IntPtr.Zero)
    {
        VideoStabInvoke.CaptureFrameSourceRelease(ref _ptr);
    }
    base.DisposeObject();
}
/// <summary>
/// Create a one pass stabilizer.
/// </summary>
/// <param name="baseFrameSource">The capture object to be stabilized</param>
public OnePassStabilizer(FrameSource baseFrameSource)
{
    // Keep a managed reference so the frame source outlives the native stabilizer.
    _baseFrameSource = baseFrameSource;
    _ptr = VideoStabInvoke.cveOnePassStabilizerCreate(
        _baseFrameSource.FrameSourcePtr,
        ref _stabilizerBase,
        ref FrameSourcePtr);
}
/// <summary>
/// Create a one pass stabilizer.
/// </summary>
/// <param name="capture">The capture object to be stabilized</param>
public OnePassStabilizer(Capture capture)
{
    // Wrap the capture so the native stabilizer can pull frames from it.
    _captureFrameSource = new CaptureFrameSource(capture);
    _ptr = VideoStabInvoke.OnePassStabilizerCreate(
        _captureFrameSource,
        ref _stabilizerBase,
        ref _frameSourcePtr);
}