		public VideoChannel (AVAssetReaderOutput readerOutput, AVAssetWriterInput writerInput, IVideoTransformer transformer)
			: base(readerOutput, writerInput)
		{
			if (transformer == null)
				throw new ArgumentNullException ("transformer");

			this.transformer = transformer;

			var adaptorAttrs = new CVPixelBufferAttributes {
				PixelFormatType = CVPixelFormatType.CV32BGRA
			};
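			// create a pixel buffer adaptor so transformed frames can be appended to the
			// writer input as 32-bit BGRA CVPixelBuffers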
			adaptor = new AVAssetWriterInputPixelBufferAdaptor (WriterInput, adaptorAttrs);
		}
		bool SetupCaptureSession ()
		{
			// configure the capture session for a medium-resolution preset; change this if
			// your code can cope with more data or a higher volume of frames
			session = new AVCaptureSession {
				SessionPreset = AVCaptureSession.PresetMedium
			};

			// create a device input and attach it to the session
			var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
			if (captureDevice == null) {
				Console.WriteLine ("No captureDevice - this won't work on the simulator, try a physical device");
				return false;
			}
			// Configure for 15 FPS. Note the use of LockForConfiguration()/UnlockForConfiguration()
			NSError error = null;
			captureDevice.LockForConfiguration (out error);
			if (error != null) {
				Console.WriteLine (error);
				captureDevice.UnlockForConfiguration ();
				return false;
			}

			if (UIDevice.CurrentDevice.CheckSystemVersion (7, 0))
				captureDevice.ActiveVideoMinFrameDuration = new CMTime (1, 15);
			captureDevice.UnlockForConfiguration ();

			var input = AVCaptureDeviceInput.FromDevice (captureDevice);
			if (input == null) {
				Console.WriteLine ("No input - this won't work on the simulator, try a physical device");
				return false;
			}

			session.AddInput (input);

			// create a VideoDataOutput and add it to the session
			var settings = new CVPixelBufferAttributes {
				PixelFormatType = CVPixelFormatType.CV32BGRA
			};
			using (var output = new AVCaptureVideoDataOutput { WeakVideoSettings = settings.Dictionary }) {
				queue = new DispatchQueue ("myQueue");
				outputRecorder = new OutputRecorder ();
				output.SetSampleBufferDelegate (outputRecorder, queue);
				session.AddOutput (output);
			}

			session.StartRunning ();
			return true;
		}
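
		// The OutputRecorder used above is defined elsewhere in the sample; the sketch below
		// shows what a minimal sample-buffer delegate of that shape could look like. The body
		// is an assumption, not the sample's actual implementation (it assumes
		// `using AVFoundation;` and `using CoreMedia;`).
		public class OutputRecorder : AVCaptureVideoDataOutputSampleBufferDelegate
		{
			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{
				try {
					// Inspect or process the frame here, e.g. via sampleBuffer.GetImageBuffer ().
				} finally {
					// Dispose each buffer promptly; capture stalls once the small pool of
					// in-flight sample buffers is exhausted.
					sampleBuffer.Dispose ();
				}
			}
		}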
		public ReadWriteSampleBufferChannel (AVAssetReaderOutput localAssetReaderOutput,
			AVAssetWriterInput localAssetWriterInput,
			bool useAdaptor)
		{
			_assetReaderOutput = localAssetReaderOutput;
			_assetWriterInput = localAssetWriterInput;
			_useAdaptor = useAdaptor;

			if (_useAdaptor) {
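				// wrap the writer input in a pixel buffer adaptor so frames can be appended
				// as 32-bit BGRA CVPixelBuffers rather than raw sample buffers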
				var adaptorAttrs = new CVPixelBufferAttributes {
					PixelFormatType = CVPixelFormatType.CV32BGRA
				};
				_adaptor = AVAssetWriterInputPixelBufferAdaptor.FromInput (localAssetWriterInput, adaptorAttrs.Dictionary);
			}

			_serializationQueue = new DispatchQueue ("ReadWriteSampleBufferChannel queue");
		}
        public void TestCallbackBackground(bool stronglyTyped)
        {
            var      width  = 640;
            var      height = 480;
            var      encoder_specification = new VTVideoEncoderSpecification();
            var      source_attributes     = new CVPixelBufferAttributes(CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange, width, height);
            var      duration = new CMTime(40, 1);
            VTStatus status;

            using var frameProperties = new NSDictionary();

            int callbackCounter = 0;
            var failures        = new List <string> ();
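            // The encoder invokes this callback once per encoded frame; count the calls and
            // record any non-Ok status so they can be asserted after encoding completes.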
            var callback        = new VTCompressionSession.VTCompressionOutputCallback((IntPtr sourceFrame, VTStatus callbackStatus, VTEncodeInfoFlags flags, CMSampleBuffer buffer) =>
            {
                Interlocked.Increment(ref callbackCounter);
                if (callbackStatus != VTStatus.Ok)
                {
                    failures.Add($"Callback #{callbackCounter} failed. Expected status = Ok, got status = {callbackStatus}");
                }
                }
#if !NET
                // Work around a crash that occurs if the buffer isn't retained
                if (stronglyTyped)
                {
                    CFRetain(buffer.Handle);
                }
#endif
            });

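            // Create the compression session from either the strongly typed attributes or
            // their underlying NSDictionary, depending on the test case.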
            using var session = stronglyTyped
                ? VTCompressionSession.Create(
                    width, height,
                    CMVideoCodecType.H264,
                    callback,
                    encoder_specification,
                    source_attributes)
                : VTCompressionSession.Create(
                    width, height,
                    CMVideoCodecType.H264,
                    callback,
                    encoder_specification,
                    source_attributes.Dictionary);

            var frameCount = 20;

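            // Encode a fixed number of empty pixel buffers, spacing the presentation
            // timestamps by the frame duration.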
            for (var i = 0; i < frameCount; i++)
            {
                using var imageBuffer = new CVPixelBuffer(width, height, CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange);
                var pts = new CMTime(40 * i, 1);
                status = session.EncodeFrame(imageBuffer, pts, duration, null, imageBuffer, out var infoFlags);
                Assert.AreEqual(VTStatus.Ok, status, $"status #{i}");
                // The sleep below looks odd, but without it the video encoder can become overwhelmed
                // and start failing (taking a long time to do so, eventually timing out the test).
                Thread.Sleep(10);
            }
            status = session.CompleteFrames(new CMTime(40 * frameCount, 1));
            Assert.AreEqual(VTStatus.Ok, status, "status finished");
            Assert.AreEqual(frameCount, callbackCounter, "frame count");
            Assert.That(failures, Is.Empty, "no callback failures");
        }