Example #1
0
        void SetupAssetReaderWriterForAudio(AVAssetTrack audioTrack)
        {
            // Nothing to wire up when the asset has no audio track.
            if (audioTrack == null)
            {
                return;
            }

            // Decompress to Linear PCM with the asset reader.
            // Passing null settings makes the reader vend media data in the format it was stored in.
            var readerOutput = AVAssetReaderTrackOutput.Create(audioTrack, (AudioSettings)null);
            if (assetReader.CanAddOutput(readerOutput))
            {
                assetReader.AddOutput(readerOutput);
            }

            // Null settings ask the writer to accept samples in their source format.
            var writerInput = AVAssetWriterInput.Create(audioTrack.MediaType, (AudioSettings)null);
            if (assetWriter.CanAddInput(writerInput))
            {
                assetWriter.AddInput(writerInput);
            }

            // Keep hold of the channel that coordinates reading and writing of sample buffers.
            audioSampleBufferChannel = new AudioChannel(readerOutput, writerInput);
        }
        public bool SetupAssetWriterAudioInput(CMFormatDescription currentFormatDescription)
        {
            // Without a stream description we cannot derive sample rate or channel count.
            var maybeAsbd = currentFormatDescription.AudioStreamBasicDescription;
            if (!maybeAsbd.HasValue)
            {
                return(false);
            }

            var asbd = maybeAsbd.Value;

            // Serialize the channel layout (when present) so it can be handed to the writer.
            var channelLayout = currentFormatDescription.AudioChannelLayout;
            var channelLayoutData = channelLayout == null ? new NSData() : channelLayout.AsData();

            // AAC at 64 kbit/s, preserving the source sample rate and channel count.
            var values = new NSObject[]
            {
                NSNumber.FromInt32((int)AudioFormatType.MPEG4AAC),
                NSNumber.FromDouble(asbd.SampleRate),
                NSNumber.FromInt32(64000),
                NSNumber.FromInt32(asbd.ChannelsPerFrame),
                channelLayoutData
            };
            var keys = new NSObject[]
            {
                AVAudioSettings.AVFormatIDKey,
                AVAudioSettings.AVSampleRateKey,
                AVAudioSettings.AVEncoderBitRateKey,
                AVAudioSettings.AVNumberOfChannelsKey,
                new NSString("AVChannelLayoutKey")                         //AVAudioSettings.AVChannelLayoutKey,
            };
            NSDictionary audioCompressionSettings = NSDictionary.FromObjectsAndKeys(values, keys);

            if (!assetWriter.CanApplyOutputSettings(audioCompressionSettings, AVMediaType.Audio))
            {
                Console.WriteLine("Couldn't apply audio output settings.");
                return(false);
            }

            assetWriterAudioIn = new AVAssetWriterInput(AVMediaType.Audio, audioCompressionSettings);
            assetWriterAudioIn.ExpectsMediaDataInRealTime = true;

            if (!assetWriter.CanAddInput(assetWriterAudioIn))
            {
                Console.WriteLine("Couldn't add asset writer audio input.");
                return(false);
            }

            assetWriter.AddInput(assetWriterAudioIn);
            return(true);
        }
		public ReadWriteSampleBufferChannel (AVAssetReaderOutput readerOutput, AVAssetWriterInput writerInput)
		{
			// Both endpoints are required; fail fast on missing collaborators.
			if (readerOutput == null)
				throw new ArgumentNullException (nameof (readerOutput));
			if (writerInput == null)
				throw new ArgumentNullException (nameof (writerInput));

			this.readerOutput = readerOutput;
			this.writerInput = writerInput;

			// All read/write coordination for this channel is serialized on a private queue.
			serializationQueue = new DispatchQueue ("ReadWriteSampleBufferChannel queue");
		}
		public VideoChannel (AVAssetReaderOutput readerOutput, AVAssetWriterInput writerInput, IVideoTransformer transformer)
			: base(readerOutput, writerInput)
		{
			// The transformer is what differentiates a video channel; it must be supplied.
			if (transformer == null)
				throw new ArgumentNullException (nameof (transformer));
			this.transformer = transformer;

			// Vend 32-bit BGRA pixel buffers so the transformer sees a predictable format.
			adaptor = new AVAssetWriterInputPixelBufferAdaptor (WriterInput, new CVPixelBufferAttributes {
				PixelFormatType = CVPixelFormatType.CV32BGRA
			});
		}
Example #5
0
        void SetupAssetReaserWriterForVideo(AVAssetTrack videoTrack)
        {
            // No video track means there is nothing to transcode.
            if (videoTrack == null)
            {
                return;
            }

            // Have the reader decompress frames to 32-bit BGRA.
            var readerOutput = new AVAssetReaderTrackOutput(videoTrack, new AVVideoSettingsUncompressed {
                PixelFormatType       = CVPixelFormatType.CV32BGRA,
                AllocateWithIOSurface = null
            });
            assetReader.AddOutput(readerOutput);

            // Pull the track's format description so attributes we don't change are preserved.
            var formatDescription = (CMVideoFormatDescription)videoTrack.FormatDescriptions.FirstOrDefault();

            // Prefer the presentation dimensions; fall back to the track's natural size.
            CGSize trackDimensions = formatDescription == null
                                ? videoTrack.NaturalSize
                                : formatDescription.GetPresentationDimensions(false, false);

            // Carry over clean aperture and pixel aspect ratio when available.
            AVVideoCodecSettings compressionSettings = null;
            if (formatDescription != null)
            {
                var cleanAperture    = (NSDictionary)formatDescription.GetExtension(CVImageBuffer.CleanApertureKey);
                var pixelAspectRatio = (NSDictionary)formatDescription.GetExtension(CVImageBuffer.PixelAspectRatioKey);
                compressionSettings = CreateCodecSettingsFor(cleanAperture, pixelAspectRatio);
            }

            // Have the writer re-encode to H.264 at the source dimensions.
            var writerInput = new AVAssetWriterInput(videoTrack.MediaType, new AVVideoSettingsCompressed {
                Codec         = AVVideoCodec.H264,
                Width         = (int)trackDimensions.Width,
                Height        = (int)trackDimensions.Height,
                CodecSettings = compressionSettings
            });
            writerInput.Transform = videoTrack.PreferredTransform;
            assetWriter.AddInput(writerInput);

            // The channel coordinates moving sample buffers from reader to writer.
            videoSampleBufferChannel = new VideoChannel(readerOutput, writerInput, transformer);
        }
        // HACK: Change CMFormatDescription to CMVideoFormatDescription
        public bool SetupAssetWriterVideoInput(CMVideoFormatDescription currentFormatDescription)
        {
            // Derive the target bitrate from the frame size. Lower-than-SD resolutions are
            // assumed to be intended for streaming and get a lower bits-per-pixel budget.
            // HACK: Change VideoDimensions to Dimensions, as this type was changed to CMVideoFormatDescription
            var dimensions     = currentFormatDescription.Dimensions;
            int numPixels      = dimensions.Width * dimensions.Height;
            float bitsPerPixel = numPixels < (640 * 480) ? 4.05F : 11.4F;
            int bitsPerSecond  = (int)(numPixels * bitsPerPixel);

            // H.264 at the source dimensions, with the computed average bitrate and a
            // key frame at least every 30 frames.
            NSDictionary videoCompressionSettings = new NSDictionary(
                AVVideo.CodecKey, AVVideo.CodecH264,
                AVVideo.WidthKey, dimensions.Width,
                AVVideo.HeightKey, dimensions.Height,
                AVVideo.CompressionPropertiesKey, new NSDictionary(
                    AVVideo.AverageBitRateKey, bitsPerSecond,
                    AVVideo.MaxKeyFrameIntervalKey, 30
                    )
                );

            if (assetWriter.CanApplyOutputSettings(videoCompressionSettings, AVMediaType.Video))
            {
                // HACK: Change NSDictionary into AVVideoSettingsCompressed created using that NSDictionary (videoCompressionSettings)
                assetWriterVideoIn = new AVAssetWriterInput(AVMediaType.Video, new AVVideoSettingsCompressed(videoCompressionSettings));
                assetWriterVideoIn.ExpectsMediaDataInRealTime = true;
                assetWriterVideoIn.Transform = TransformFromCurrentVideoOrientationToOrientation(ReferenceOrientation);

                if (assetWriter.CanAddInput(assetWriterVideoIn))
                {
                    assetWriter.AddInput(assetWriterVideoIn);
                }
                else
                {
                    Console.WriteLine("Couldn't add asset writer video input.");
                    return(false);
                }
            }
            else
            {
                Console.WriteLine("Couldn't apply video output settings.");
                // BUGFIX: report failure to the caller (previously this branch fell
                // through and returned true), matching SetupAssetWriterAudioInput.
                return(false);
            }

            return(true);
        }
        public void StopRecording()
        {
            // Finishes the current recording: flushes the asset writer, tears down the
            // writer inputs, and hands the finished movie to SaveMovieToCameraRoll.
            // Dispatched to movieWritingQueue so tear-down is serialized with any
            // in-flight sample-buffer writes.
            movieWritingQueue.DispatchAsync(() =>
            {
                // Ignore redundant stop requests (already stopping, or never recording).
                if (recordingWillBeStopped || !IsRecording)
                {
                    return;
                }

                recordingWillBeStopped = true;

                // recordingDidStop is called from saveMovieToCameraRoll
                RecordingWillStop?.Invoke();

                if (assetWriter.FinishWriting())
                {
                    // Release both writer inputs now that the file is finalized.
                    if (assetWriterAudioIn != null)
                    {
                        assetWriterAudioIn.Dispose();
                        assetWriterAudioIn = null;
                    }

                    if (assetWriterVideoIn != null)
                    {
                        assetWriterVideoIn.Dispose();
                        assetWriterVideoIn = null;
                    }

                    // NOTE(review): locks on the inUse collection object itself; presumably
                    // the same reference is locked wherever buffers are registered — confirm.
                    lock (inUse)
                    {
                        assetWriter.Dispose();
                        assetWriter = null;

                        // Clear the 'Inuse' list when we're creating a new Recording session.
                        inUse.Clear();
                    }

                    // Force both inputs to be re-created before the next recording starts.
                    readyToRecordVideo = false;
                    readyToRecordAudio = false;

                    SaveMovieToCameraRoll();
                }
                else
                {
                    // FinishWriting failed — surface the writer's error to the user.
                    ShowError(assetWriter.Error);
                }
            });
        }
Example #8
0
        public ReadWriteSampleBufferChannel(AVAssetReaderOutput readerOutput, AVAssetWriterInput writerInput)
        {
            // Both endpoints are required; reject null before storing anything.
            if (readerOutput == null)
            {
                throw new ArgumentNullException(nameof(readerOutput));
            }
            if (writerInput == null)
            {
                throw new ArgumentNullException(nameof(writerInput));
            }

            this.readerOutput = readerOutput;
            this.writerInput  = writerInput;

            // Coordination of reads and writes happens on this private serial queue.
            serializationQueue = new DispatchQueue("ReadWriteSampleBufferChannel queue");
        }
Example #9
0
        public VideoChannel(AVAssetReaderOutput readerOutput, AVAssetWriterInput writerInput, IVideoTransformer transformer)
            : base(readerOutput, writerInput)
        {
            // The transformer is what differentiates a video channel; it is required.
            if (transformer == null)
            {
                throw new ArgumentNullException(nameof(transformer));
            }
            this.transformer = transformer;

            // Vend 32-bit BGRA pixel buffers so the transformer sees a predictable format.
            adaptor = new AVAssetWriterInputPixelBufferAdaptor(WriterInput, new CVPixelBufferAttributes {
                PixelFormatType = CVPixelFormatType.CV32BGRA
            });
        }
		public ReadWriteSampleBufferChannel (AVAssetReaderOutput localAssetReaderOutput,
			AVAssetWriterInput localAssetWriterInput,
			bool useAdaptor)
		{
			_assetReaderOutput = localAssetReaderOutput;
			_assetWriterInput = localAssetWriterInput;
			_useAdaptor = useAdaptor;

			// When requested, wrap the writer input in a pixel-buffer adaptor that
			// vends 32-bit BGRA buffers.
			if (useAdaptor) {
				var attrs = new CVPixelBufferAttributes {
					PixelFormatType = CVPixelFormatType.CV32BGRA
				};
				_adaptor = AVAssetWriterInputPixelBufferAdaptor.FromInput (localAssetWriterInput, attrs.Dictionary);
			}

			// All channel work is serialized on this private queue.
			_serializationQueue = new DispatchQueue ("ReadWriteSampleBufferChannel queue");
		}
        public bool SetupAssetWriterAudioInput(CMFormatDescription currentFormatDescription)
        {
            // Without a stream description we can't derive sample rate or channel count.
            var maybeAsbd = currentFormatDescription.AudioStreamBasicDescription;
            if (!maybeAsbd.HasValue)
            {
                return(false);
            }

            var asbd = maybeAsbd.Value;

            // Serialize the channel layout (when present) for the writer settings.
            var channelLayout = currentFormatDescription.AudioChannelLayout;
            var channelLayoutData = channelLayout == null ? new NSData() : channelLayout.AsData();

            // AAC at 64 kbit/s, preserving the source sample rate and channel count.
            NSDictionary audioCompressionSettings = new NSDictionary(
                AVAudioSettings.AVFormatIDKey, AudioFormatType.MPEG4AAC,
                AVAudioSettings.AVSampleRateKey, asbd.SampleRate,
                AVAudioSettings.AVEncoderBitRateKey, 64000,
                AVAudioSettings.AVNumberOfChannelsKey, asbd.ChannelsPerFrame,
                AVAudioSettings.AVChannelLayoutKey, channelLayoutData
                );

            if (!assetWriter.CanApplyOutputSettings(audioCompressionSettings, AVMediaType.Audio))
            {
                Console.WriteLine("Couldn't apply audio output settings.");
                return(false);
            }

            // HACK: Change NSDictionary into AudioSettings created using that NSDictionary (audioCompressionSettings)
            assetWriterAudioIn = new AVAssetWriterInput(AVMediaType.Audio, new AudioSettings(audioCompressionSettings));
            assetWriterAudioIn.ExpectsMediaDataInRealTime = true;

            if (!assetWriter.CanAddInput(assetWriterAudioIn))
            {
                Console.WriteLine("Couldn't add asset writer audio input.");
                return(false);
            }

            assetWriter.AddInput(assetWriterAudioIn);
            return(true);
        }
Example #12
0
        public ReadWriteSampleBufferChannel(AVAssetReaderOutput localAssetReaderOutput,
                                            AVAssetWriterInput localAssetWriterInput,
                                            bool useAdaptor)
        {
            _assetReaderOutput = localAssetReaderOutput;
            _assetWriterInput  = localAssetWriterInput;
            _useAdaptor        = useAdaptor;

            // When requested, wrap the writer input in a pixel-buffer adaptor that
            // vends 32-bit BGRA buffers.
            if (useAdaptor)
            {
                var attrs = new CVPixelBufferAttributes {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                };
                _adaptor = AVAssetWriterInputPixelBufferAdaptor.FromInput(localAssetWriterInput, attrs.Dictionary);
            }

            // All channel work is serialized on this private queue.
            _serializationQueue = new DispatchQueue("ReadWriteSampleBufferChannel queue");
        }
        private void SetupAssetReaderWriterForAudio(AVAssetTrack audioTrack)
        {
            // Nothing to configure when the asset carries no audio track.
            if (audioTrack == null)
            {
                return;
            }

            // Null settings make the reader decompress to Linear PCM.
            var readerOutput = AVAssetReaderTrackOutput.Create(audioTrack, (AudioSettings)null);
            _assetReader.AddOutput(readerOutput);

            // Null settings make the writer accept samples in their source format.
            var writerInput = AVAssetWriterInput.Create(audioTrack.MediaType, (AudioSettings)null);
            _assetWriter.AddInput(writerInput);

            // The channel coordinates reading and writing of the audio sample buffers;
            // audio needs no pixel-buffer adaptor, hence useAdaptor: false.
            _audioSampleBufferChannel = new ReadWriteSampleBufferChannel(readerOutput, writerInput, false);
        }
        public bool SetupAssetWriterVideoInput(CMFormatDescription currentFormatDescription)
        {
            // Derive the target bitrate from the frame size. Lower-than-SD resolutions
            // are assumed to be intended for streaming and get a lower bits-per-pixel budget.
            var dimensions     = currentFormatDescription.VideoDimensions;
            int numPixels      = dimensions.Width * dimensions.Height;
            float bitsPerPixel = numPixels < (640 * 480) ? 4.05F : 11.4F;
            int bitsPerSecond  = (int)(numPixels * bitsPerPixel);

            // H.264 at the source dimensions, with the computed average bitrate and a
            // key frame at least every 30 frames.
            NSDictionary videoCompressionSettings = NSDictionary.FromObjectsAndKeys(
                new NSObject[]
            {                       // The Compression Settings Values
                AVVideo.CodecH264,
                NSNumber.FromInt32(dimensions.Width),
                NSNumber.FromInt32(dimensions.Height),
                NSDictionary.FromObjectsAndKeys(
                    new object[]
                {                                       // Compression Property Values
                    NSNumber.FromInt32(bitsPerSecond),
                    NSNumber.FromInt32(30)
                },
                    new object[]
                {                                       // Compression Property Keys
                    AVVideo.AverageBitRateKey,
                    AVVideo.MaxKeyFrameIntervalKey
                })
            },
                new NSObject[]
            {                           // The Compression Settings Keys
                AVVideo.CodecKey,
                AVVideo.WidthKey,
                AVVideo.HeightKey,
                AVVideo.CompressionPropertiesKey
            }
                );

            if (assetWriter.CanApplyOutputSettings(videoCompressionSettings, AVMediaType.Video))
            {
                assetWriterVideoIn = new AVAssetWriterInput(AVMediaType.Video, videoCompressionSettings);
                assetWriterVideoIn.ExpectsMediaDataInRealTime = true;
                assetWriterVideoIn.Transform = TransformFromCurrentVideoOrientationToOrientation(ReferenceOrientation);

                if (assetWriter.CanAddInput(assetWriterVideoIn))
                {
                    assetWriter.AddInput(assetWriterVideoIn);
                }
                else
                {
                    Console.WriteLine("Couldn't add asset writer video input.");
                    return(false);
                }
            }
            else
            {
                Console.WriteLine("Couldn't apply video output settings.");
                // BUGFIX: report failure to the caller (previously this branch fell
                // through and returned true), matching SetupAssetWriterAudioInput.
                return(false);
            }

            return(true);
        }
        public Compression CompressVideo(NSUrl source, NSUrl destination, bool isMinBitRateEnabled = true, VideoQuality quality = VideoQuality.high, bool keepOriginalResolution = false)
        {
            // Sets up (but does not run) a read/re-encode pass from 'source' to 'destination'
            // at a bitrate derived from 'quality'. NOTE(review): as written this returns right
            // after StartWriting() without pumping any sample buffers, and none of the
            // reader/writer objects are disposed — presumably the copy loop lives elsewhere
            // or is still TODO; verify before relying on this.
            var frameCount           = 0;
            var compressionOperation = new Compression();

            var videoAsset = new AVUrlAsset(source);

            try
            {
                // First() throws if the asset has no video track; caught by the outer catch.
                var videoTrack = videoAsset.Tracks.First(x => x.MediaType == AVMediaType.Video);
                var bitrate    = videoTrack.EstimatedDataRate;

                // Check for a min video bitrate before compression
                if (isMinBitRateEnabled && bitrate <= MIN_BITRATE)
                {
                    var error = new Compression();
                    error.title = "The provided bitrate is smaller than what is needed for compression try to set isMinBitRateEnabled to false";
                    //TODO: raise an error event
                    return(error);
                }

                var newBitrate = getBitrate(bitrate, quality);

                // Handle new width and height values
                var videoSize = videoTrack.NaturalSize;
                var size      = generateWidthAndHeight(videoSize.Width, videoSize.Height, keepOriginalResolution);
                var newWidth  = size.Width;
                var newHeight = size.Height;

                // Total Frames
                var durationInSeconds = videoAsset.Duration.Seconds;
                var frameRate         = videoTrack.NominalFrameRate;
                var totalFrames       = Math.Ceiling(durationInSeconds * (double)(frameRate));

                // Progress
                var totalUnits = Convert.ToInt64(totalFrames);
                //var progress = NSProgress(totalUnits);

                // Setup video writer input
                var videoWriterInput = new AVAssetWriterInput(AVMediaType.Video, getVideoWriterSettings(newBitrate, newWidth, newHeight));
                videoWriterInput.ExpectsMediaDataInRealTime = true;
                videoWriterInput.Transform = videoTrack.PreferredTransform;

                NSError nSError;

                var videoWriter = new AVAssetWriter(destination, AVFileType.QuickTimeMovie, out nSError);
                videoWriter.AddInput(videoWriterInput);

                // 875704438 == 0x34323076, the FourCC '420v'. NOTE(review): the literal key
                // "PixelFormatType" looks suspect — CoreVideo expects the native
                // kCVPixelBufferPixelFormatTypeKey string, not the managed property name; verify.
                var videoReaderSettings = new NSDictionary(
                    "PixelFormatType", new NSNumber(875704438)
                    );

                var videoReaderOutput = new AVAssetReaderTrackOutput(videoTrack, videoReaderSettings);

                AVAssetReader videoReader;
                try
                {
                    videoReader = new AVAssetReader(videoAsset, out nSError);
                    videoReader.AddOutput(videoReaderOutput);
                }
                catch
                {
                    // NOTE(review): the failure is swallowed and "(error)" below is a literal,
                    // not an interpolation — the actual error is never reported.
                    Console.WriteLine("video reader error: (error)");
                    //TODO: invoke the error event
                }

                //TODO: check how to pass a nil parameter
                var audioSettings = new AudioSettings()
                {
                    //EncoderBitRate = 64000,
                    //Format = AudioToolbox.AudioFormatType.,
                    NumberChannels = 1,
                    //SampleRate = 44100
                };

                var audioWriterInput = new AVAssetWriterInput(AVMediaType.Audio, audioSettings);
                audioWriterInput.ExpectsMediaDataInRealTime = false;
                videoWriter.AddInput(audioWriterInput);

                //setup audio reader
                // NOTE(review): FirstOrDefault returns null for assets without audio, and that
                // null track is handed straight to AVAssetReaderTrackOutput — confirm callers
                // only pass assets that contain an audio track.
                var audioTrack        = videoAsset.Tracks.FirstOrDefault(x => x.MediaType == AVMediaType.Audio);
                var audioReaderOutput = new AVAssetReaderTrackOutput(audioTrack, audioSettings);
                var audioReader       = new AVAssetReader(videoAsset, out nSError);
                audioReader.AddOutput(audioReaderOutput);
                videoWriter.StartWriting();
            }
            catch (Exception ex)
            {
                //TODO: add error handling
                return(new Compression());
            }

            return(new Compression());
        }
		public bool SetupAssetWriterVideoInput (CMFormatDescription currentFormatDescription)
		{
			// Derive the target bitrate from the frame size. Lower-than-SD resolutions
			// are assumed to be intended for streaming and get a lower bits-per-pixel budget.
			var dimensions = currentFormatDescription.VideoDimensions;
			int numPixels = dimensions.Width * dimensions.Height;
			float bitsPerPixel = numPixels < (640 * 480) ? 4.05F : 11.4F;
			int bitsPerSecond = (int) (numPixels * bitsPerPixel);

			// H.264 at the source dimensions, with the computed average bitrate and a
			// key frame at least every 30 frames.
			NSDictionary videoCompressionSettings = NSDictionary.FromObjectsAndKeys (
				new NSObject[] 
				{   // The Compression Settings Values
					AVVideo.CodecH264,
					NSNumber.FromInt32 (dimensions.Width),
					NSNumber.FromInt32 (dimensions.Height),
					NSDictionary.FromObjectsAndKeys (
						new object[] 
						{	// Compression Property Values
							NSNumber.FromInt32 (bitsPerSecond),
							NSNumber.FromInt32 (30)
						},
						new object[]
						{	// Compression Property Keys
							AVVideo.AverageBitRateKey,
							AVVideo.MaxKeyFrameIntervalKey
						})
				},
				new NSObject[]
				{	// The Compression Settings Keys
					AVVideo.CodecKey,
					AVVideo.WidthKey,
					AVVideo.HeightKey,
					AVVideo.CompressionPropertiesKey
				}
				);

			if (assetWriter.CanApplyOutputSettings (videoCompressionSettings, AVMediaType.Video)){
				assetWriterVideoIn = new AVAssetWriterInput (AVMediaType.Video, videoCompressionSettings);
				assetWriterVideoIn.ExpectsMediaDataInRealTime = true;
				assetWriterVideoIn.Transform = TransformFromCurrentVideoOrientationToOrientation (ReferenceOrientation);

				if (assetWriter.CanAddInput (assetWriterVideoIn))
					assetWriter.AddInput (assetWriterVideoIn);
				else {
					Console.WriteLine ("Couldn't add asset writer video input.");
					return false;
				}
			} else {
				Console.WriteLine ("Couldn't apply video output settings.");
				// BUGFIX: report failure to the caller (previously this branch fell
				// through and returned true), matching SetupAssetWriterAudioInput.
				return false;
			}

			return true;
		}
		public bool SetupAssetWriterAudioInput (CMFormatDescription currentFormatDescription)
		{
			// Without a stream description we can't derive sample rate or channel count.
			var maybeAsbd = currentFormatDescription.AudioStreamBasicDescription;
			if (!maybeAsbd.HasValue)
				return false;

			var asbd = maybeAsbd.Value;

			// Serialize the channel layout (when present) for the writer settings.
			var channelLayout = currentFormatDescription.AudioChannelLayout;
			var channelLayoutData = channelLayout == null ? new NSData () : channelLayout.AsData ();

			// AAC at 64 kbit/s, preserving the source sample rate and channel count.
			var values = new NSObject[] {
				NSNumber.FromInt32 ((int)AudioFormatType.MPEG4AAC),
				NSNumber.FromDouble (asbd.SampleRate),
				NSNumber.FromInt32 (64000),
				NSNumber.FromInt32 (asbd.ChannelsPerFrame),
				channelLayoutData
			};
			var keys = new NSObject[] {
				AVAudioSettings.AVFormatIDKey,
				AVAudioSettings.AVSampleRateKey,
				AVAudioSettings.AVEncoderBitRateKey,
				AVAudioSettings.AVNumberOfChannelsKey,
				new NSString("AVChannelLayoutKey") //AVAudioSettings.AVChannelLayoutKey,
			};
			NSDictionary audioCompressionSettings = NSDictionary.FromObjectsAndKeys (values, keys);

			if (!assetWriter.CanApplyOutputSettings (audioCompressionSettings, AVMediaType.Audio)) {
				Console.WriteLine ("Couldn't apply audio output settings.");
				return false;
			}

			assetWriterAudioIn = new AVAssetWriterInput (AVMediaType.Audio, audioCompressionSettings);
			assetWriterAudioIn.ExpectsMediaDataInRealTime = true;

			if (!assetWriter.CanAddInput (assetWriterAudioIn)) {
				Console.WriteLine ("Couldn't add asset writer audio input.");
				return false;
			}

			assetWriter.AddInput (assetWriterAudioIn);
			return true;
		}
		// HACK: Change CMFormatDescription to CMVideoFormatDescription
		public bool SetupAssetWriterVideoInput (CMVideoFormatDescription currentFormatDescription)
		{
			// Derive the target bitrate from the frame size. Lower-than-SD resolutions
			// are assumed to be intended for streaming and get a lower bits-per-pixel budget.
			// HACK: Change VideoDimensions to Dimensions, as this type was changed to CMVideoFormatDescription
			var dimensions = currentFormatDescription.Dimensions;
			int numPixels = dimensions.Width * dimensions.Height;
			float bitsPerPixel = numPixels < (640 * 480) ? 4.05F : 11.4F;
			int bitsPerSecond = (int) (numPixels * bitsPerPixel);

			// H.264 at the source dimensions, with the computed average bitrate and a
			// key frame at least every 30 frames.
			NSDictionary videoCompressionSettings = new NSDictionary (
				AVVideo.CodecKey, AVVideo.CodecH264,
				AVVideo.WidthKey, dimensions.Width,
				AVVideo.HeightKey,dimensions.Height,
				AVVideo.CompressionPropertiesKey, new NSDictionary(
					AVVideo.AverageBitRateKey, bitsPerSecond,
					AVVideo.MaxKeyFrameIntervalKey, 30
				)
			);

			if (assetWriter.CanApplyOutputSettings (videoCompressionSettings, AVMediaType.Video)){
				// HACK: Change NSDictionary into AVVideoSettingsCompressed created using that NSDictionary (videoCompressionSettings)
				assetWriterVideoIn = new AVAssetWriterInput (AVMediaType.Video, new AVVideoSettingsCompressed( videoCompressionSettings));
				assetWriterVideoIn.ExpectsMediaDataInRealTime = true;
				assetWriterVideoIn.Transform = TransformFromCurrentVideoOrientationToOrientation (ReferenceOrientation);

				if (assetWriter.CanAddInput (assetWriterVideoIn))
					assetWriter.AddInput (assetWriterVideoIn);
				else {
					Console.WriteLine ("Couldn't add asset writer video input.");
					return false;
				}
			} else {
				Console.WriteLine ("Couldn't apply video output settings.");
				// BUGFIX: report failure to the caller (previously this branch fell
				// through and returned true), matching SetupAssetWriterAudioInput.
				return false;
			}

			return true;
		}
		public AudioChannel (AVAssetReaderOutput readerOutput, AVAssetWriterInput writerInput)
			: base(readerOutput, writerInput)
		{
			// Audio needs no setup beyond the base channel wiring
			// (no pixel-buffer adaptor, unlike VideoChannel).
		}
		public bool SetupAssetWriterAudioInput (CMFormatDescription currentFormatDescription)
		{
			// Without a stream description we can't derive sample rate or channel count.
			var maybeAsbd = currentFormatDescription.AudioStreamBasicDescription;
			if (!maybeAsbd.HasValue)
				return false;

			var asbd = maybeAsbd.Value;

			// Serialize the channel layout (when present) for the writer settings.
			var channelLayout = currentFormatDescription.AudioChannelLayout;
			var channelLayoutData = channelLayout == null ? new NSData () : channelLayout.AsData ();

			// AAC at 64 kbit/s, preserving the source sample rate and channel count.
			NSDictionary audioCompressionSettings = new NSDictionary (
				AVAudioSettings.AVFormatIDKey, AudioFormatType.MPEG4AAC,
				AVAudioSettings.AVSampleRateKey, asbd.SampleRate,
				AVAudioSettings.AVEncoderBitRateKey, 64000,
				AVAudioSettings.AVNumberOfChannelsKey, asbd.ChannelsPerFrame,
				AVAudioSettings.AVChannelLayoutKey, channelLayoutData
			);

			if (!assetWriter.CanApplyOutputSettings (audioCompressionSettings, AVMediaType.Audio)) {
				Console.WriteLine ("Couldn't apply audio output settings.");
				return false;
			}

			// HACK: Change NSDictionary into AudioSettings created using that NSDictionary (audioCompressionSettings)
			assetWriterAudioIn = new AVAssetWriterInput (AVMediaType.Audio, new AudioSettings(audioCompressionSettings));
			assetWriterAudioIn.ExpectsMediaDataInRealTime = true;

			if (!assetWriter.CanAddInput (assetWriterAudioIn)) {
				Console.WriteLine ("Couldn't add asset writer audio input.");
				return false;
			}

			assetWriter.AddInput (assetWriterAudioIn);
			return true;
		}
		void SetupAssetReaserWriterForVideo (AVAssetTrack videoTrack)
		{
			// No video track means there is nothing to transcode.
			if (videoTrack == null)
				return;

			// Have the reader decompress frames to 32-bit BGRA.
			var readerOutput = new AVAssetReaderTrackOutput (videoTrack, new AVVideoSettingsUncompressed {
				PixelFormatType = CVPixelFormatType.CV32BGRA,
				AllocateWithIOSurface = null
			});
			assetReader.AddOutput (readerOutput);

			// Pull the track's format description so attributes we don't change are preserved.
			var formatDescription = (CMVideoFormatDescription)videoTrack.FormatDescriptions.FirstOrDefault ();

			// Prefer the presentation dimensions; fall back to the track's natural size.
			CGSize trackDimensions = formatDescription == null
				? videoTrack.NaturalSize
				: formatDescription.GetPresentationDimensions (false, false);

			// Carry over clean aperture and pixel aspect ratio when available.
			AVVideoCodecSettings compressionSettings = null;
			if (formatDescription != null) {
				var cleanAperture = (NSDictionary)formatDescription.GetExtension (CVImageBuffer.CleanApertureKey);
				var pixelAspectRatio = (NSDictionary)formatDescription.GetExtension (CVImageBuffer.PixelAspectRatioKey);
				compressionSettings = CreateCodecSettingsFor (cleanAperture, pixelAspectRatio);
			}

			// Have the writer re-encode to H.264 at the source dimensions.
			var writerInput = new AVAssetWriterInput (videoTrack.MediaType, new AVVideoSettingsCompressed {
				Codec = AVVideoCodec.H264,
				Width = (int)trackDimensions.Width,
				Height = (int)trackDimensions.Height,
				CodecSettings = compressionSettings
			});
			writerInput.Transform = videoTrack.PreferredTransform;
			assetWriter.AddInput (writerInput);

			// The channel coordinates moving sample buffers from reader to writer.
			videoSampleBufferChannel = new VideoChannel (readerOutput, writerInput, transformer);
		}
        protected AVAssetWriterInput MaybeInitializeInputWriter()
        {
            try
            {
                // H.264 at a fixed 640x480 (same keys/values as before).
                var values = new NSObject[] { AVVideo.CodecH264, new NSNumber(640), new NSNumber(480) };
                var keys   = new NSObject[] { AVVideo.CodecKey, AVVideo.WidthKey, AVVideo.HeightKey };
                var outputSettings = NSDictionary.FromObjectsAndKeys(values, keys);

                var input = new AVAssetWriterInput(AVMediaType.Video, outputSettings);
                input.ExpectsMediaDataInRealTime = true;
                return input;
            }
            catch (Exception x)
            {
                // Surface the failure to the user and signal "not available" to the caller.
                Failure.Alert(x.Message);
                return null;
            }
        }
        public bool StartRecording()
        {
            try
            {
                // Build the capture pipeline stage by stage; any stage that fails
                // alerts the user and aborts the whole start attempt.
                if ((session = MaybeInitializeSession()) == null)
                {
                    Failure.Alert("Couldn't initialize session");
                    return false;
                }
                if ((writer = MaybeInitializeAssetWriter()) == null)
                {
                    Failure.Alert("Couldn't initialize writer");
                    return false;
                }
                if ((inputWriter = MaybeInitializeInputWriter()) == null)
                {
                    Failure.Alert("Couldn't initialize input writer");
                    return false;
                }
                if (!writer.CanAddInput(inputWriter))
                {
                    Failure.Alert("Couldn't add input writer to writer");
                    return false;
                }
                writer.AddInput(inputWriter);

                // Everything is wired up — start the capture session.
                session.StartRunning();
                return true;
            }
            catch (Exception x)
            {
                // Unexpected failures are reported the same way as stage failures.
                Failure.Alert(x.Message);
                return false;
            }
        }
 public AudioChannel(AVAssetReaderOutput readerOutput, AVAssetWriterInput writerInput)
     : base(readerOutput, writerInput)
 {
     // Audio needs no setup beyond the base channel wiring
     // (no pixel-buffer adaptor, unlike VideoChannel).
 }