        public VTStatus CanAcceptFormatDescriptor(CMFormatDescription newDescriptor)
        {
            if (Handle == IntPtr.Zero)
            {
                throw new ObjectDisposedException("DecompressionSession");
            }

            return(VTDecompressionSessionCanAcceptFormatDescription(Handle, newDescriptor.Handle));
        }
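
A minimal usage sketch for the method above (hypothetical call site, not part of the original example): when a new format description arrives from a capture or demuxer callback, CanAcceptFormatDescriptor tells the caller whether the existing session can keep decoding or must be rebuilt. The `session` field and the RecreateDecompressionSession helper are assumed names.

        // Hedged sketch: `session` wraps an existing VTDecompressionSession and
        // RecreateDecompressionSession is a hypothetical app-level helper.
        void OnFormatChanged(CMSampleBuffer sampleBuffer)
        {
            CMFormatDescription desc = sampleBuffer.GetFormatDescription();

            if (session.CanAcceptFormatDescriptor(desc) != VTStatus.Ok)
            {
                // The current session cannot decode the new format;
                // tear it down and create a fresh one for this description.
                RecreateDecompressionSession(desc);
            }
        }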
Example #2
            public bool SetupAssetWriterAudioInput(CMFormatDescription currentFormatDescription)
            {
                // If the AudioStreamBasicDescription is null, return false.
                if (!currentFormatDescription.AudioStreamBasicDescription.HasValue)
                {
                    return(false);
                }

                var currentASBD = currentFormatDescription.AudioStreamBasicDescription.Value;

                // Get the Audio Channel Layout from the Format Description.
                var currentChannelLayout     = currentFormatDescription.AudioChannelLayout;
                var currentChannelLayoutData = currentChannelLayout == null ? new NSData() : currentChannelLayout.AsData();

                NSDictionary audioCompressionSettings = NSDictionary.FromObjectsAndKeys(
                    new NSObject[]
                {
                    NSNumber.FromInt32((int)AudioFormatType.MPEG4AAC),
                    NSNumber.FromDouble(currentASBD.SampleRate),
                    NSNumber.FromInt32(64000),
                    NSNumber.FromInt32(currentASBD.ChannelsPerFrame),
                    currentChannelLayoutData
                },
                    new NSObject[]
                {
                    AVAudioSettings.AVFormatIDKey,
                    AVAudioSettings.AVSampleRateKey,
                    AVAudioSettings.AVEncoderBitRateKey,
                    AVAudioSettings.AVNumberOfChannelsKey,
                    new NSString("AVChannelLayoutKey")                             //AVAudioSettings.AVChannelLayoutKey,
                });

                if (processor.assetWriter.CanApplyOutputSettings(audioCompressionSettings, AVMediaType.Audio))
                {
                    processor.assetWriterAudioIn = new AVAssetWriterInput(AVMediaType.Audio, audioCompressionSettings);
                    processor.assetWriterAudioIn.ExpectsMediaDataInRealTime = true;

                    if (processor.assetWriter.CanAddInput(processor.assetWriterAudioIn))
                    {
                        processor.assetWriter.AddInput(processor.assetWriterAudioIn);
                    }
                    else
                    {
                        Console.WriteLine("Couldn't add asset writer audio input.");
                        return(false);
                    }
                }
                else
                {
                    Console.WriteLine("Couldn't apply audio output settings.");
                    return(false);
                }

                return(true);
            }
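
A minimal usage sketch, mirroring the audio branch of the DidOutputSampleBuffer override shown later on this page: the writer input is created lazily from the first format description that arrives on the audio connection (connection, audioConnection, and readyToRecordAudio are fields of the surrounding processor class).

            // Inside DidOutputSampleBuffer, on the movie-writing queue:
            if (connection == audioConnection && !readyToRecordAudio)
            {
                readyToRecordAudio = SetupAssetWriterAudioInput(sampleBuffer.GetFormatDescription());
            }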
Example #3
        public void ClosedCaption()
        {
            CMFormatDescriptionError fde;

            using (var fd = CMFormatDescription.Create(CMMediaType.ClosedCaption, (uint)CMClosedCaptionFormatType.CEA608, out fde))
            {
                Assert.AreEqual(CMFormatDescriptionError.None, fde, "#1");
                Assert.AreEqual((CMMuxedStreamType)0, fd.MuxedStreamType, "#2");
                Assert.AreEqual(CMMediaType.ClosedCaption, fd.MediaType, "#3");
                Assert.AreEqual(CMClosedCaptionFormatType.CEA608, fd.ClosedCaptionFormatType, "#4");
            }
        }
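
A hedged variation on the test above (not from the original suite): the same factory accepts the other closed-caption subtypes, e.g. CEA708, and the analogous assertions would be expected to hold.

        public void ClosedCaption708()
        {
            CMFormatDescriptionError fde;

            using (var fd = CMFormatDescription.Create(CMMediaType.ClosedCaption, (uint)CMClosedCaptionFormatType.CEA708, out fde))
            {
                Assert.AreEqual(CMFormatDescriptionError.None, fde, "#1");
                Assert.AreEqual(CMClosedCaptionFormatType.CEA708, fd.ClosedCaptionFormatType, "#2");
            }
        }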
Example #4
        private CGImage CreateImage(CMSampleBuffer sampleBuffer)
        {
            CGImage image = null;

            CMFormatDescription formatDescription = sampleBuffer.GetFormatDescription();
            var           subType     = formatDescription.MediaSubType;
            CMBlockBuffer blockBuffer = sampleBuffer.GetDataBuffer();

            if (blockBuffer != null)
            {
                if (subType != (int)CMVideoCodecType.JPEG)
                {
                    throw new Exception("Block buffer must be JPEG encoded.");
                }

                var jpegData = new NSMutableData();
                jpegData.Length = blockBuffer.DataLength;

                blockBuffer.CopyDataBytes(0, blockBuffer.DataLength, jpegData.Bytes);

                using (var imageSource = CGImageSource.FromData(jpegData)) {
                    var decodeOptions = new CGImageOptions {
                        ShouldAllowFloat = false,
                        ShouldCache      = false
                    };

                    image = imageSource.CreateImage(0, decodeOptions);
                }
            }
            else
            {
                if (subType != (int)CVPixelFormatType.CV32BGRA)
                {
                    throw new Exception("Image buffer must be BGRA encoded.");
                }

                CVPixelBuffer pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer;

                // Lock the pixel buffer so its base address stays valid while
                // CoreGraphics reads the BGRA pixels directly out of it.
                pixelBuffer.Lock(CVPixelBufferLock.ReadOnly);

                using (var colorSpace = CGColorSpace.CreateDeviceRGB())
                    using (var bitmapContext = new CGBitmapContext(pixelBuffer.BaseAddress,
                                                                   (int)pixelBuffer.Width, (int)pixelBuffer.Height, 8,
                                                                   (int)pixelBuffer.BytesPerRow, colorSpace, CGImageAlphaInfo.NoneSkipFirst)) {
                        image = bitmapContext.ToImage();
                    }

                pixelBuffer.Unlock(CVPixelBufferLock.ReadOnly);
            }

            return(image);
        }
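
A minimal usage sketch for CreateImage (hypothetical call site): wrapping the returned CGImage in a UIImage for display once a JPEG still-image sample buffer arrives. The `imageView` field is an assumption.

        using (CGImage cgImage = CreateImage(sampleBuffer))
        {
            var uiImage = UIImage.FromImage(cgImage);

            // UI updates must happen on the main thread; `imageView` is assumed.
            InvokeOnMainThread(() => imageView.Image = uiImage);
        }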
Example #5
        public bool SetupAssetWriterAudioInput(CMFormatDescription currentFormatDescription)
        {
            // If the AudioStreamBasicDescription is null, return false.
            if (!currentFormatDescription.AudioStreamBasicDescription.HasValue)
            {
                return(false);
            }

            var currentASBD = currentFormatDescription.AudioStreamBasicDescription.Value;

            // Get the Audio Channel Layout from the Format Description.
            var currentChannelLayout     = currentFormatDescription.AudioChannelLayout;
            var currentChannelLayoutData = currentChannelLayout == null ? new NSData() : currentChannelLayout.AsData();

            NSDictionary audioCompressionSettings = new NSDictionary(
                AVAudioSettings.AVFormatIDKey, AudioFormatType.MPEG4AAC,
                AVAudioSettings.AVSampleRateKey, currentASBD.SampleRate,
                AVAudioSettings.AVEncoderBitRateKey, 64000,
                AVAudioSettings.AVNumberOfChannelsKey, currentASBD.ChannelsPerFrame,
                AVAudioSettings.AVChannelLayoutKey, currentChannelLayoutData
                );

            if (assetWriter.CanApplyOutputSettings(audioCompressionSettings, AVMediaType.Audio))
            {
                // HACK: Change NSDictionary into AudioSettings created using that NSDictionary (audioCompressionSettings)
                assetWriterAudioIn = new AVAssetWriterInput(AVMediaType.Audio, new AudioSettings(audioCompressionSettings));
                assetWriterAudioIn.ExpectsMediaDataInRealTime = true;

                if (assetWriter.CanAddInput(assetWriterAudioIn))
                {
                    assetWriter.AddInput(assetWriterAudioIn);
                }
                else
                {
                    Console.WriteLine("Couldn't add asset writer audio input.");
                    return(false);
                }
            }
            else
            {
                Console.WriteLine("Couldn't apply audio output settings.");
                return(false);
            }

            return(true);
        }
Example #6
        public void CreateReadyWithPacketDescriptions()
        {
            TestRuntime.AssertSystemVersion(PlatformName.iOS, 8, 0, throwIfOtherPlatform: false);
            TestRuntime.AssertSystemVersion(PlatformName.MacOSX, 10, 10, throwIfOtherPlatform: false);

            CMBlockBufferError bbe;

            using (var bb = CMBlockBuffer.CreateEmpty(0, CMBlockBufferFlags.AlwaysCopyData, out bbe)) {
                CMFormatDescriptionError fde;
                using (var fd = CMFormatDescription.Create(CMMediaType.ClosedCaption, (uint)CMClosedCaptionFormatType.CEA608, out fde)) {
                    CMSampleBufferError sbe;
                    using (var sb = CMSampleBuffer.CreateReadyWithPacketDescriptions(bb, fd, 1, CMTime.Indefinite, null, out sbe)) {
                        Assert.Null(sb, "CMSampleBuffer");
                        // the `null` packet descriptions do not match the format description (not a great test, but at least the API is callable)
                        Assert.That(sbe, Is.EqualTo(CMSampleBufferError.RequiredParameterMissing), "CMSampleBufferError");
                    }
                }
            }
        }
Example #7
        public void Video()
        {
            TestRuntime.AssertSystemVersion(PlatformName.iOS, 7, 0, throwIfOtherPlatform: false);

            CMFormatDescriptionError fde;

            var auth = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);

            switch (auth)
            {
            case AVAuthorizationStatus.Restricted:
            case AVAuthorizationStatus.Denied:
            case AVAuthorizationStatus.NotDetermined:
                // We can't test the below, since some other tests may have initialized whatever we need for the API to work correctly.
//				Assert.Null (CMFormatDescription.Create (CMMediaType.Video, (uint) CMVideoCodecType.H264, out fde), "null ({0})", auth);
//				Assert.That (fde, Is.EqualTo (CMFormatDescriptionError.InvalidParameter), "CMFormatDescriptionError");
                break;

            case AVAuthorizationStatus.Authorized:
                // We can't test the below, since some other tests may have initialized whatever we need for the API to work correctly.
//				Assert.Null (CMFormatDescription.Create (CMMediaType.Video, (uint) CMVideoCodecType.H264, out fde), "null (authorized)");
//				Assert.That (fde, Is.EqualTo (CMFormatDescriptionError.InvalidParameter), "CMFormatDescriptionError (authorized)");

                using (var captureSession = new AVCaptureSession()) {
                    using (var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video)) {
                        if (videoDevice == null)
                        {
                            Assert.Inconclusive("Failed to create a video device for testing");
                        }
                        NSError error;
                        using (var videoInput = new AVCaptureDeviceInput(videoDevice, out error)) {
                            // this seems to initialize something.
                        }
                    }
                }

                Assert.IsNotNull(CMFormatDescription.Create(CMMediaType.Video, (uint)CMVideoCodecType.H264, out fde), "not null (authorized)");
                Assert.That(fde, Is.EqualTo(CMFormatDescriptionError.None), "CMFormatDescriptionError #2 (authorized)");
                break;
            }
        }
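
A minimal sketch (not part of the test) of how an app would normally move the authorization status from NotDetermined to Authorized before the authorized branch above can run:

        AVCaptureDevice.RequestAccessForMediaType(AVMediaType.Video, granted => {
            // The callback runs on an arbitrary queue; dispatch to the
            // main thread before touching any UI.
            Console.WriteLine("Camera access granted: {0}", granted);
        });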
Example #8
        public void CreateReadyWithPacketDescriptions()
        {
            if (!TestRuntime.CheckSystemAndSDKVersion(8, 0))
            {
                Assert.Inconclusive("Requires iOS8");
            }

            CMBlockBufferError bbe;

            using (var bb = CMBlockBuffer.CreateEmpty(0, CMBlockBufferFlags.AlwaysCopyData, out bbe)) {
                CMFormatDescriptionError fde;
                using (var fd = CMFormatDescription.Create(CMMediaType.ClosedCaption, (uint)CMClosedCaptionFormatType.CEA608, out fde)) {
                    CMSampleBufferError sbe;
                    using (var sb = CMSampleBuffer.CreateReadyWithPacketDescriptions(bb, fd, 1, CMTime.Indefinite, null, out sbe)) {
                        Assert.Null(sb, "CMSampleBuffer");
                        // the `null` packet descriptions do not match the format description (not a great test, but at least the API is callable)
                        Assert.That(sbe, Is.EqualTo(CMSampleBufferError.RequiredParameterMissing), "CMSampleBufferError");
                    }
                }
            }
        }
Example #9
        public void Video()
        {
            if (!TestRuntime.CheckSystemAndSDKVersion(7, 0))
            {
                Assert.Ignore("This test fails on iOS 6 (even though the API exists).");
            }

            CMFormatDescriptionError fde;

            var auth = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);

            switch (auth)
            {
            case AVAuthorizationStatus.Restricted:
            case AVAuthorizationStatus.Denied:
            case AVAuthorizationStatus.NotDetermined:
                // We can't test the below, since some other tests may have initialized whatever we need for the API to work correctly.
//				Assert.Null (CMFormatDescription.Create (CMMediaType.Video, (uint) CMVideoCodecType.H264, out fde), "null ({0})", auth);
//				Assert.That (fde, Is.EqualTo (CMFormatDescriptionError.InvalidParameter), "CMFormatDescriptionError");
                break;

            case AVAuthorizationStatus.Authorized:
                // We can't test the below, since some other tests may have initialized whatever we need for the API to work correctly.
//				Assert.Null (CMFormatDescription.Create (CMMediaType.Video, (uint) CMVideoCodecType.H264, out fde), "null (authorized)");
//				Assert.That (fde, Is.EqualTo (CMFormatDescriptionError.InvalidParameter), "CMFormatDescriptionError (authorized)");

                using (var captureSession = new AVCaptureSession()) {
                    using (var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video)) {
                        NSError error;
                        using (var videoInput = new AVCaptureDeviceInput(videoDevice, out error)) {
                            // this seems to initialize something.
                        }
                    }
                }

                Assert.IsNotNull(CMFormatDescription.Create(CMMediaType.Video, (uint)CMVideoCodecType.H264, out fde), "not null (authorized)");
                Assert.That(fde, Is.EqualTo(CMFormatDescriptionError.None), "CMFormatDescriptionError #2 (authorized)");
                break;
            }
        }
Example #10
            public bool SetupAssetWriterVideoInput(CMFormatDescription currentFormatDescription)
            {
                Console.WriteLine("Setting up Video Asset Writer");
                float bitsPerPixel;
                var   dimensions = currentFormatDescription.VideoDimensions;
                int   numPixels  = dimensions.Width * dimensions.Height;
                int   bitsPerSecond;

                // Assume that lower-than-SD resolutions are intended for streaming, and use a lower bitrate
                if (numPixels < (640 * 480))
                {
                    bitsPerPixel = 4.05F;                     // This bitrate matches the quality produced by AVCaptureSessionPresetMedium or Low.
                }
                else
                {
                    bitsPerPixel = 11.4F;                     // This bitrate matches the quality produced by AVCaptureSessionPresetHigh.
                }
                bitsPerSecond = Convert.ToInt32((float)numPixels * bitsPerPixel);

                NSDictionary videoCompressionSettings = NSDictionary.FromObjectsAndKeys(
                    new NSObject[]
                {                           // The Compression Settings Values
                    AVVideo.CodecH264,
                    NSNumber.FromInt32(dimensions.Width),
                    NSNumber.FromInt32(dimensions.Height),
                    NSDictionary.FromObjectsAndKeys(
                        new object[]
                    {                                           // Compression Property Values
                        NSNumber.FromInt32(bitsPerSecond),
                        NSNumber.FromInt32(30)
                    },
                        new object[]
                    {                                           // Compression Property Keys
                        AVVideo.AverageBitRateKey,
                        AVVideo.MaxKeyFrameIntervalKey
                    })
                },
                    new NSObject[]
                {                               // The Compression Settings Keys
                    AVVideo.CodecKey,
                    AVVideo.WidthKey,
                    AVVideo.HeightKey,
                    AVVideo.CompressionPropertiesKey
                }
                    );

                if (processor.assetWriter.CanApplyOutputSettings(videoCompressionSettings, AVMediaType.Video))
                {
                    processor.assetWriterVideoIn = new AVAssetWriterInput(AVMediaType.Video, videoCompressionSettings);
                    processor.assetWriterVideoIn.ExpectsMediaDataInRealTime = true;
                    processor.assetWriterVideoIn.Transform = processor.TransformFromCurrentVideoOrientationToOrientation(processor.ReferenceOrientation);

                    if (processor.assetWriter.CanAddInput(processor.assetWriterVideoIn))
                    {
                        processor.assetWriter.AddInput(processor.assetWriterVideoIn);
                    }
                    else
                    {
                        Console.WriteLine("Couldn't add asset writer video input.");
                        return(false);
                    }
                }
                else
                {
                    Console.WriteLine("Couldn't apply video output settings.");
                    return(false);
                }

                return(true);
            }
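
A worked example of the bitrate heuristic above: a 1280x720 frame has 921,600 pixels, above the 640x480 (307,200 pixel) threshold, so bitsPerPixel is 11.4 and bitsPerSecond = 921,600 * 11.4 ≈ 10,506,240, roughly 10.5 Mbit/s. A 480x360 frame (172,800 pixels) falls below the threshold and gets 172,800 * 4.05 ≈ 699,840 bit/s.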
Example #11
            public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
            {
                CMFormatDescription formatDescription = sampleBuffer.GetFormatDescription();

                if (connection == processor.videoConnection)
                {
                    // Get framerate
                    CMTime timestamp = sampleBuffer.PresentationTimeStamp;
                    CalculateFramerateAtTimestamp(timestamp);

                    // Get frame dimensions (for onscreen display)
                    if (processor.VideoDimensions.Width == 0 && processor.VideoDimensions.Height == 0)
                    {
                        processor.VideoDimensions = formatDescription.GetVideoPresentationDimensions(true, false);
                    }

                    // Get the buffer type
                    if (processor.VideoType == 0)
                    {
                        processor.VideoType = formatDescription.MediaSubType;
                    }
                    // TODO: processor.VideoType = (CMVideoCodecType)Enum.ToObject (typeof(CMVideoCodecType), formatDescription.MediaSubType);

                    // Synchronously process the pixel buffer to de-green it.
                    using (var pixelBuffer = sampleBuffer.GetImageBuffer())
                        ProcessPixelBuffer(pixelBuffer);

                    processor.previewBufferQueue.Enqueue(sampleBuffer);

                    //var writeBuffer = sampleBuffer.Duplicate ();
                    InvokeOnMainThread(() => {
                        var j = processor.previewBufferQueue.Dequeue();

                        var sbuf = j as CMSampleBuffer;
                        if (sbuf == null)
                        {
                            // Record the current sampleBuffer.ClassHandle
                            // Then run another iteration and on the next one, print the ClassHandle
                            Console.WriteLine("The type is {0}", new NSString(CFCopyDescription(j.Handle)));
                            return;
                        }

                        using (CVImageBuffer pixBuf = sbuf.GetImageBuffer()){
                            if (processor.PixelBufferReadyForDisplay != null)
                            {
                                processor.PixelBufferReadyForDisplay(pixBuf);
                            }
                        }

                        if (processor.assetWriter == null)
                        {
                            sbuf.Dispose();
                        }
                        else
                        {
                            processor.CompleteBufferUse(sbuf);
                        }
                    });
                }


                processor.movieWritingQueue.DispatchAsync(() => {
                    if (processor.assetWriter != null)
                    {
                        bool wasReadyToRecord = (processor.readyToRecordAudio && processor.readyToRecordVideo);

                        // Initialize the video input if this is not done yet
                        if (!processor.readyToRecordVideo)
                        {
                            processor.readyToRecordVideo = SetupAssetWriterVideoInput(formatDescription);
                        }

                        // Write the video data to file
                        if (processor.readyToRecordVideo && processor.readyToRecordAudio)
                        {
                            processor.WriteSampleBuffer(sampleBuffer, AVMediaType.Video);
                        }

                        bool isReadyToRecord = (processor.readyToRecordAudio && processor.readyToRecordVideo);

                        if (!wasReadyToRecord && isReadyToRecord)
                        {
                            processor.recordingWillBeStarted = false;
                            processor.IsRecording            = true;

                            if (processor.RecordingDidStart != null)
                            {
                                processor.RecordingDidStart();
                            }
                        }

                        processor.CompleteBufferUse(sampleBuffer);
                    }
                });
            }
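
The previewBufferQueue that this override enqueues into is created elsewhere in the sample; a minimal sketch of that setup, assuming the RosyWriter-style processor class these examples come from:

                // CreateUnsorted returns a FIFO CMBufferQueue that retains the
                // sample buffers it holds until they are dequeued; the argument
                // is a capacity hint.
                previewBufferQueue = CMBufferQueue.CreateUnsorted(1);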
		public bool SetupAssetWriterAudioInput (CMFormatDescription currentFormatDescription)
		{
			// If the AudioStreamBasicDescription is null, return false.
			if (!currentFormatDescription.AudioStreamBasicDescription.HasValue)
				return false;

			var currentASBD = currentFormatDescription.AudioStreamBasicDescription.Value;

			// Get the Audio Channel Layout from the Format Description.
			var currentChannelLayout = currentFormatDescription.AudioChannelLayout;
			var currentChannelLayoutData = currentChannelLayout == null ? new NSData () : currentChannelLayout.AsData ();

			NSDictionary audioCompressionSettings = NSDictionary.FromObjectsAndKeys (
				new NSObject[]
				{ 
					NSNumber.FromInt32 ((int)AudioFormatType.MPEG4AAC), 
					NSNumber.FromDouble (currentASBD.SampleRate),
					NSNumber.FromInt32 (64000),
					NSNumber.FromInt32 (currentASBD.ChannelsPerFrame),
					currentChannelLayoutData
				},
				new NSObject[]
				{ 
					AVAudioSettings.AVFormatIDKey,
					AVAudioSettings.AVSampleRateKey,
					AVAudioSettings.AVEncoderBitRateKey,
					AVAudioSettings.AVNumberOfChannelsKey,
					new NSString("AVChannelLayoutKey") //AVAudioSettings.AVChannelLayoutKey,
				});

			if (assetWriter.CanApplyOutputSettings (audioCompressionSettings, AVMediaType.Audio)){
				assetWriterAudioIn = new AVAssetWriterInput (AVMediaType.Audio, audioCompressionSettings);
				assetWriterAudioIn.ExpectsMediaDataInRealTime = true;

				if (assetWriter.CanAddInput (assetWriterAudioIn))
					assetWriter.AddInput (assetWriterAudioIn);
				else {
					Console.WriteLine ("Couldn't add asset writer audio input.");
					return false;
				}
			} else {
				Console.WriteLine ("Couldn't apply audio output settings.");
				return false;
			}

			return true;
		}
		public bool SetupAssetWriterVideoInput (CMFormatDescription currentFormatDescription)
		{
			//Console.WriteLine ("Setting up Video Asset Writer");
			float bitsPerPixel;
			var dimensions = currentFormatDescription.VideoDimensions;
			int numPixels = dimensions.Width * dimensions.Height;
			int bitsPerSecond; 
			
			// Assume that lower-than-SD resolutions are intended for streaming, and use a lower bitrate
			bitsPerPixel = numPixels < (640 * 480) ? 4.05F : 11.4F;
			
			bitsPerSecond = (int) (numPixels * bitsPerPixel);
			
			NSDictionary videoCompressionSettings = NSDictionary.FromObjectsAndKeys (
				new NSObject[] 
				{   // The Compression Settings Values
					AVVideo.CodecH264,
					NSNumber.FromInt32 (dimensions.Width),
					NSNumber.FromInt32 (dimensions.Height),
					NSDictionary.FromObjectsAndKeys (
						new object[] 
						{	// Compression Property Values
							NSNumber.FromInt32 (bitsPerSecond),
							NSNumber.FromInt32 (30)
						},
						new object[]
						{	// Compression Property Keys
							AVVideo.AverageBitRateKey,
							AVVideo.MaxKeyFrameIntervalKey
						})
				},
				new NSObject[]
				{	// The Compression Settings Keys
					AVVideo.CodecKey,
					AVVideo.WidthKey,
					AVVideo.HeightKey,
					AVVideo.CompressionPropertiesKey
				}
				);
			
			if (assetWriter.CanApplyOutputSettings (videoCompressionSettings, AVMediaType.Video)){
				assetWriterVideoIn = new AVAssetWriterInput (AVMediaType.Video, videoCompressionSettings);
				assetWriterVideoIn.ExpectsMediaDataInRealTime = true;
				assetWriterVideoIn.Transform = TransformFromCurrentVideoOrientationToOrientation (ReferenceOrientation);
				
				if (assetWriter.CanAddInput (assetWriterVideoIn))
					assetWriter.AddInput (assetWriterVideoIn);
				else {
					Console.WriteLine ("Couldn't add asset writer video input.");
					return false;
				}
			} else {
				Console.WriteLine ("Couldn't apply video output settings.");
				return false;
			}
			
			return true;
		}
		public bool SetupAssetWriterAudioInput (CMFormatDescription currentFormatDescription)
		{
			// If the AudioStreamBasicDescription is null, return false.
			if (!currentFormatDescription.AudioStreamBasicDescription.HasValue)
				return false;

			var currentASBD = currentFormatDescription.AudioStreamBasicDescription.Value;

			// Get the Audio Channel Layout from the Format Description.
			var currentChannelLayout = currentFormatDescription.AudioChannelLayout;
			var currentChannelLayoutData = currentChannelLayout == null ? new NSData () : currentChannelLayout.AsData ();

			NSDictionary audioCompressionSettings = new NSDictionary (
				AVAudioSettings.AVFormatIDKey, AudioFormatType.MPEG4AAC,
				AVAudioSettings.AVSampleRateKey, currentASBD.SampleRate,
				AVAudioSettings.AVEncoderBitRateKey, 64000,
				AVAudioSettings.AVNumberOfChannelsKey, currentASBD.ChannelsPerFrame,
				AVAudioSettings.AVChannelLayoutKey, currentChannelLayoutData
			);

			if (assetWriter.CanApplyOutputSettings (audioCompressionSettings, AVMediaType.Audio)){
				// HACK: Change NSDictionary into AudioSettings created using that NSDictionary (audioCompressionSettings)
				assetWriterAudioIn = new AVAssetWriterInput (AVMediaType.Audio, new AudioSettings(audioCompressionSettings));
				assetWriterAudioIn.ExpectsMediaDataInRealTime = true;

				if (assetWriter.CanAddInput (assetWriterAudioIn))
					assetWriter.AddInput (assetWriterAudioIn);
				else {
					Console.WriteLine ("Couldn't add asset writer audio input.");
					return false;
				}
			} else {
				Console.WriteLine ("Couldn't apply audio output settings.");
				return false;
			}

			return true;
		}
        public virtual void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            CMFormatDescription formatDescription = sampleBuffer.GetFormatDescription();

            if (connection == videoConnection)
            {
                // Get framerate
                CMTime timestamp = sampleBuffer.PresentationTimeStamp;
                CalculateFramerateAtTimestamp(timestamp);

                // Get frame dimensions (for onscreen display)
                if (VideoDimensions.IsEmpty)
                {
                    VideoDimensions = formatDescription.GetVideoPresentationDimensions(true, false);
                }

                // Get the buffer type
                if (VideoType == 0)
                {
                    VideoType = formatDescription.MediaSubType;
                }

                // Synchronously process the pixel buffer to de-green it.
                using (var pixelBuffer = sampleBuffer.GetImageBuffer())
                    ProcessPixelBuffer(pixelBuffer);

                previewBufferQueue.Enqueue(sampleBuffer);

                //var writeBuffer = sampleBuffer.Duplicate ();
                InvokeOnMainThread(() => {
                    var j = previewBufferQueue.Dequeue();

                    var sbuf = j as CMSampleBuffer;
                    if (sbuf == null)
                    {
#if DEBUG
                        // Record the current sampleBuffer.ClassHandle
                        // Then run another iteration and on the next one, print the ClassHandle
                        Console.WriteLine("The type is {0}", new NSString(CFCopyDescription(j.Handle)));
#endif
                        return;
                    }

                    using (CVImageBuffer pixBuf = sbuf.GetImageBuffer()) {
                        if (PixelBufferReadyForDisplay != null)
                        {
                            PixelBufferReadyForDisplay(pixBuf);
                        }
                    }
                });
            }
            // keep a reference to 'sampleBuffer', movieWritingQueue will remove it
            CompleteBufferUse(sampleBuffer);

            movieWritingQueue.DispatchAsync(() => {
                if (assetWriter != null)
                {
                    bool wasReadyToRecord = (readyToRecordAudio && readyToRecordVideo);

                    if (connection == videoConnection)
                    {
                        // Initialize the video input if this is not done yet
                        if (!readyToRecordVideo)
                        {
                            readyToRecordVideo = SetupAssetWriterVideoInput(formatDescription);
                        }

                        // Write the video data to file
                        if (readyToRecordVideo && readyToRecordAudio)
                        {
                            WriteSampleBuffer(sampleBuffer, AVMediaType.Video);
                        }
                    }
                    else if (connection == audioConnection)
                    {
                        if (!readyToRecordAudio)
                        {
                            readyToRecordAudio = SetupAssetWriterAudioInput(formatDescription);
                        }

                        if (readyToRecordAudio && readyToRecordVideo)
                        {
                            WriteSampleBuffer(sampleBuffer, AVMediaType.Audio);
                        }
                    }
                    bool isReadyToRecord = (readyToRecordAudio && readyToRecordVideo);

                    if (!wasReadyToRecord && isReadyToRecord)
                    {
                        recordingWillBeStarted = false;
                        IsRecording            = true;

                        if (RecordingDidStart != null)
                        {
                            RecordingDidStart();
                        }
                    }
                }
                CompleteBufferUse(sampleBuffer);
            });
        }
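
CompleteBufferUse and WriteSampleBuffer are not shown on this page. Below is a hypothetical sketch of what a use-count based CompleteBufferUse could look like, so that a buffer handed to both the preview path and the writer is disposed exactly once; every name here is an assumption, not the sample's actual code (Dictionary comes from System.Collections.Generic).

        // Hypothetical fields: one use count per outstanding buffer.
        readonly Dictionary<IntPtr, int> bufferUseCount = new Dictionary<IntPtr, int>();
        readonly object bufferUseSync = new object();

        void CompleteBufferUse(CMSampleBuffer buffer)
        {
            lock (bufferUseSync)
            {
                int count;
                if (bufferUseCount.TryGetValue(buffer.Handle, out count) && --count > 0)
                {
                    bufferUseCount[buffer.Handle] = count;
                    return;                     // another consumer still needs the buffer
                }
                bufferUseCount.Remove(buffer.Handle);
            }

            buffer.Dispose();                   // last consumer is done; release the native buffer
        }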