bool AddImageSamplerOutput(out string errorMessage, int minimumSampleIntervalInMilliSeconds)
        {
            errorMessage = string.Empty;

            // Configure a video data output that delivers frames as 32-bit BGRA pixel buffers.
            frameGrabberOutput = new AVCaptureVideoDataOutput {
                WeakVideoSettings = new CVPixelBufferAttributes {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                }.Dictionary
            };

            // Route sample buffers to our sampler delegate on a dedicated dispatch queue.
            queue             = new DispatchQueue("captureQueue");
            videoFrameSampler = new VideoFrameSamplerDelegate();
            frameGrabberOutput.SetSampleBufferDelegateQueue(videoFrameSampler, queue);

            // Listen for delivered frames and for capture failures.
            videoFrameSampler.CaptureError  += HandleImageCaptureError;
            videoFrameSampler.ImageCaptured += HandleImageCaptured;

            // Attach the configured output to the capture session.
            session.AddOutput(frameGrabberOutput);

            // Best effort: throttle frame delivery to the requested minimum interval.
            // CMTime(ms, 1000) expresses the interval in seconds (ms / 1000).
            try {
                var firstConnection = frameGrabberOutput.Connections[0];
                firstConnection.VideoMinFrameDuration = new CMTime(minimumSampleIntervalInMilliSeconds, 1000);
            } catch (Exception ex) {
                Console.WriteLine(ex.Message);
            }

            return true;
        }
// Example #2 (score: 0)
        /// <summary>
        /// Attaches an AVCaptureVideoDataOutput to the capture session that samples
        /// video frames as 32-bit BGRA images and forwards them through
        /// <c>videoFrameSampler</c>'s ImageCaptured/CaptureError events.
        /// </summary>
        /// <param name="errorMessage">Always set to an empty string; reserved for future error reporting.</param>
        /// <param name="minimumSampleIntervalInMilliSeconds">Minimum time between delivered frames, in milliseconds.</param>
        /// <returns>Always <c>true</c>.</returns>
        private bool addImageSamplerOutput(out string errorMessage, int minimumSampleIntervalInMilliSeconds)
        {
            errorMessage = "";

            // create a VideoDataOutput (32-bit BGRA frames) and add it to the capture session
            frameGrabberOutput = new AVCaptureVideoDataOutput();
            frameGrabberOutput.VideoSettings = new AVVideoSettings(CVPixelFormatType.CV32BGRA);

            // set up the output queue and delegate
            queue             = new MonoTouch.CoreFoundation.DispatchQueue("captureQueue");
            videoFrameSampler = new VideoFrameSamplerDelegate();
            frameGrabberOutput.SetSampleBufferDelegateAndQueue(videoFrameSampler, queue);

            // subscribe to frame capture events
            videoFrameSampler.CaptureError  += new EventHandler<CaptureErrorEventArgs>(handleImageCaptureError);
            videoFrameSampler.ImageCaptured += new EventHandler<ImageCaptureEventArgs>(handleImageCaptured);

            // add the output to the session
            session.AddOutput(frameGrabberOutput);

            // set minimum time interval between image samples (if possible).
            // CMTime(ms, 1000) expresses the interval in seconds: ms / 1000.
            try
            {
                AVCaptureConnection connection = (AVCaptureConnection)frameGrabberOutput.Connections[0];
                connection.VideoMinFrameDuration = new CMTime(minimumSampleIntervalInMilliSeconds, 1000);
            }
            catch (Exception ex)
            {
                // Previously an empty catch: log so throttling failures are not silently lost.
                Console.WriteLine(ex.Message);
            }

            return true;
        }
// Example #3 (score: 0)
		/// <summary>
		/// Attaches an AVCaptureVideoDataOutput to the capture session that samples
		/// video frames as 32-bit BGRA images and forwards them through
		/// <c>videoFrameSampler</c>'s ImageCaptured/CaptureError events.
		/// </summary>
		/// <param name="errorMessage">Always set to an empty string; reserved for future error reporting.</param>
		/// <param name="minimumSampleIntervalInMilliSeconds">Minimum time between delivered frames, in milliseconds.</param>
		/// <returns>Always <c>true</c>.</returns>
		private bool addImageSamplerOutput( out string errorMessage, int minimumSampleIntervalInMilliSeconds )
		{
			errorMessage = "";

			// create a VideoDataOutput (32-bit BGRA frames) and add it to the capture session
			frameGrabberOutput = new AVCaptureVideoDataOutput();
			frameGrabberOutput.WeakVideoSettings = new CVPixelBufferAttributes () { PixelFormatType = CVPixelFormatType.CV32BGRA }.Dictionary;
			// set up the output queue and delegate
			queue = new CoreFoundation.DispatchQueue ("captureQueue");
			videoFrameSampler = new VideoFrameSamplerDelegate();
			frameGrabberOutput.SetSampleBufferDelegateQueue (videoFrameSampler, queue);

			// subscribe to frame capture events
			videoFrameSampler.CaptureError += new EventHandler<CaptureErrorEventArgs>( handleImageCaptureError );
			videoFrameSampler.ImageCaptured += new EventHandler<ImageCaptureEventArgs>( handleImageCaptured );

			// add the output to the session
			session.AddOutput (frameGrabberOutput);

			// set minimum time interval between image samples (if possible).
			// CMTime(ms, 1000) expresses the interval in seconds: ms / 1000.
			try
			{
				AVCaptureConnection connection = (AVCaptureConnection)frameGrabberOutput.Connections[0];
				connection.VideoMinFrameDuration = new CMTime(minimumSampleIntervalInMilliSeconds, 1000);
			}
			catch (Exception ex)
			{
				// Previously an empty catch: log so throttling failures are not silently lost.
				Console.WriteLine(ex.Message);
			}

			return true;
		}
		bool AddImageSamplerOutput( out string errorMessage, int minimumSampleIntervalInMilliSeconds )
		{
			errorMessage = string.Empty;

			// Configure a video data output that delivers frames as 32-bit BGRA pixel buffers.
			frameGrabberOutput = new AVCaptureVideoDataOutput {
				WeakVideoSettings = new CVPixelBufferAttributes {
					PixelFormatType = CVPixelFormatType.CV32BGRA
				}.Dictionary
			};

			// Route sample buffers to our sampler delegate on a dedicated dispatch queue.
			queue = new DispatchQueue ("captureQueue");
			videoFrameSampler = new VideoFrameSamplerDelegate();
			frameGrabberOutput.SetSampleBufferDelegateQueue (videoFrameSampler, queue);

			// Listen for delivered frames and for capture failures.
			videoFrameSampler.CaptureError += HandleImageCaptureError;
			videoFrameSampler.ImageCaptured += HandleImageCaptured;

			// Attach the configured output to the capture session.
			session.AddOutput (frameGrabberOutput);

			// Best effort: throttle frame delivery to the requested minimum interval.
			// CMTime(ms, 1000) expresses the interval in seconds (ms / 1000).
			try {
				var firstConnection = frameGrabberOutput.Connections[0];
				firstConnection.VideoMinFrameDuration = new CMTime(minimumSampleIntervalInMilliSeconds, 1000);
			} catch (Exception ex) {
				Console.WriteLine (ex.Message);
			}

			return true;
		}