void SetupAssetReaderWriterForAudio(AVAssetTrack audioTrack)
{
    if (audioTrack == null)
        return;

    // Decompress to Linear PCM with the asset reader.
    // To read the media data from a specific asset track in the format in which it was stored,
    // pass null to the settings parameter.
    AVAssetReaderOutput output = AVAssetReaderTrackOutput.Create(audioTrack, (AudioSettings)null);
    if (assetReader.CanAddOutput(output)) {
        assetReader.AddOutput(output);
    }

    AVAssetWriterInput input = AVAssetWriterInput.Create(audioTrack.MediaType, (AudioSettings)null);
    if (assetWriter.CanAddInput(input)) {
        assetWriter.AddInput(input);
    }

    // Create and save an instance of ReadWriteSampleBufferChannel,
    // which will coordinate the work of reading and writing sample buffers.
    audioSampleBufferChannel = new AudioChannel(output, input);
}
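// A minimal sketch (not part of the original snippet) of how the assetReader/assetWriter fields used
// above might be created before the audio setup runs; the asset and outputUrl parameters, and the
// choice of QuickTime as the container, are assumptions for illustration.
void SetupReaderAndWriter(AVAsset asset, NSUrl outputUrl)
{
    NSError error;

    // The reader pulls decoded samples from the source asset...
    assetReader = new AVAssetReader(asset, out error);
    if (error != null)
        throw new NSErrorException(error);

    // ...and the writer re-encodes them into the output movie file.
    assetWriter = new AVAssetWriter(outputUrl, AVFileType.QuickTimeMovie, out error);
    if (error != null)
        throw new NSErrorException(error);
}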
// HACK: Change CMFormatDescription to CMVideoFormatDescription
public bool SetupAssetWriterVideoInput(CMVideoFormatDescription currentFormatDescription)
{
    //Console.WriteLine ("Setting up Video Asset Writer");
    float bitsPerPixel;
    // HACK: Change VideoDimensions to Dimensions, as this type was changed to CMVideoFormatDescription
    var dimensions = currentFormatDescription.Dimensions;
    int numPixels = dimensions.Width * dimensions.Height;
    int bitsPerSecond;

    // Assume that lower-than-SD resolutions are intended for streaming, and use a lower bitrate
    bitsPerPixel = numPixels < (640 * 480) ? 4.05F : 11.4F;
    bitsPerSecond = (int)(numPixels * bitsPerPixel);

    NSDictionary videoCompressionSettings = new NSDictionary(
        AVVideo.CodecKey, AVVideo.CodecH264,
        AVVideo.WidthKey, dimensions.Width,
        AVVideo.HeightKey, dimensions.Height,
        AVVideo.CompressionPropertiesKey, new NSDictionary(
            AVVideo.AverageBitRateKey, bitsPerSecond,
            AVVideo.MaxKeyFrameIntervalKey, 30
        )
    );

    if (assetWriter.CanApplyOutputSettings(videoCompressionSettings, AVMediaType.Video)) {
        // HACK: Change NSDictionary into AVVideoSettingsCompressed created using that NSDictionary (videoCompressionSettings)
        assetWriterVideoIn = new AVAssetWriterInput(AVMediaType.Video, new AVVideoSettingsCompressed(videoCompressionSettings));
        assetWriterVideoIn.ExpectsMediaDataInRealTime = true;
        assetWriterVideoIn.Transform = TransformFromCurrentVideoOrientationToOrientation(ReferenceOrientation);

        if (assetWriter.CanAddInput(assetWriterVideoIn)) {
            assetWriter.AddInput(assetWriterVideoIn);
        } else {
            Console.WriteLine("Couldn't add asset writer video input.");
            return false;
        }
    } else {
        Console.WriteLine("Couldn't apply video output settings.");
    }

    return true;
}
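// The orientation helper referenced above is not shown in the snippet. This is a hedged sketch of
// what it commonly looks like in capture-to-movie samples: rotate by the difference between the
// requested reference orientation and the capture connection's current orientation. The
// videoConnection field is an assumption.
static float AngleOffsetFromPortrait(AVCaptureVideoOrientation orientation)
{
    switch (orientation) {
    case AVCaptureVideoOrientation.PortraitUpsideDown:
        return (float)Math.PI;
    case AVCaptureVideoOrientation.LandscapeRight:
        return -(float)(Math.PI / 2);
    case AVCaptureVideoOrientation.LandscapeLeft:
        return (float)(Math.PI / 2);
    default:
        return 0; // Portrait
    }
}

CGAffineTransform TransformFromCurrentVideoOrientationToOrientation(AVCaptureVideoOrientation orientation)
{
    float target = AngleOffsetFromPortrait(orientation);
    float current = AngleOffsetFromPortrait(videoConnection.VideoOrientation); // assumed capture connection field
    return CGAffineTransform.MakeRotation(target - current);
}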
private void SetupAssetReaderWriterForAudio(AVAssetTrack audioTrack)
{
    if (audioTrack == null)
        return;

    // Decompress to Linear PCM with the asset reader
    AVAssetReaderOutput output = AVAssetReaderTrackOutput.Create(audioTrack, (AudioSettings)null);
    _assetReader.AddOutput(output);

    AVAssetWriterInput input = AVAssetWriterInput.Create(audioTrack.MediaType, (AudioSettings)null);
    _assetWriter.AddInput(input);

    // Create and save an instance of AAPLRWSampleBufferChannel,
    // which will coordinate the work of reading and writing sample buffers
    _audioSampleBufferChannel = new ReadWriteSampleBufferChannel(output, input, false);
}
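// Once the audio (and video) channels are configured, reading and writing have to be started before
// any sample buffers can flow. This is a hedged sketch of that step, modeled on the AVReaderWriter
// pattern rather than taken from the original source; the _timeRange field is an assumption.
private bool StartReadingAndWriting(out NSError error)
{
    error = null;

    // Both the reader and the writer must be started before sample buffers are transferred.
    if (!_assetReader.StartReading()) {
        error = _assetReader.Error;
        return false;
    }
    if (!_assetWriter.StartWriting()) {
        error = _assetWriter.Error;
        return false;
    }

    // The writing session is expressed in the source asset's timeline.
    _assetWriter.StartSessionAtSourceTime(_timeRange.Start);
    return true;
}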
public bool StartRecording()
{
    try {
        session = MaybeInitializeSession();
        if (session == null) {
            Failure.Alert("Couldn't initialize session");
            return false;
        }
        writer = MaybeInitializeAssetWriter();
        if (writer == null) {
            Failure.Alert("Couldn't initialize writer");
            return false;
        }
        inputWriter = MaybeInitializeInputWriter();
        if (inputWriter == null) {
            Failure.Alert("Couldn't initialize input writer");
            return false;
        }
        if (!writer.CanAddInput(inputWriter)) {
            Failure.Alert("Couldn't add input writer to writer");
            return false;
        }
        writer.AddInput(inputWriter);
        session.StartRunning();
        return true;
    } catch (Exception x) {
        Failure.Alert(x.Message);
        return false;
    }
}
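// A possible counterpart to StartRecording, sketched here for illustration rather than taken from the
// original source; the field names mirror the ones used above. It stops the capture session and lets
// the writer finish the output file asynchronously.
public void StopRecording()
{
    if (session != null)
        session.StopRunning();

    if (inputWriter != null)
        inputWriter.MarkAsFinished();

    if (writer != null) {
        // FinishWriting with a completion handler is the asynchronous, non-deprecated overload.
        writer.FinishWriting(() => Console.WriteLine("Finished writing: " + writer.Status));
    }
}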
public Compression CompressVideo(NSUrl source, NSUrl destination, bool isMinBitRateEnabled = true, VideoQuality quality = VideoQuality.high, bool keepOriginalResolution = false)
{
    var frameCount = 0;
    var compressionOperation = new Compression();

    var videoAsset = new AVUrlAsset(source);
    try {
        var videoTrack = videoAsset.Tracks.First(x => x.MediaType == AVMediaType.Video);
        var bitrate = videoTrack.EstimatedDataRate;

        // Check for a minimum video bitrate before compression
        if (isMinBitRateEnabled && bitrate <= MIN_BITRATE) {
            var error = new Compression();
            error.title = "The provided bitrate is smaller than what is needed for compression; try setting isMinBitRateEnabled to false";
            // TODO: raise an error event
            return error;
        }

        var newBitrate = getBitrate(bitrate, quality);

        // Handle new width and height values
        var videoSize = videoTrack.NaturalSize;
        var size = generateWidthAndHeight(videoSize.Width, videoSize.Height, keepOriginalResolution);
        var newWidth = size.Width;
        var newHeight = size.Height;

        // Total frames
        var durationInSeconds = videoAsset.Duration.Seconds;
        var frameRate = videoTrack.NominalFrameRate;
        var totalFrames = Math.Ceiling(durationInSeconds * (double)frameRate);

        // Progress
        var totalUnits = Convert.ToInt64(totalFrames);
        //var progress = NSProgress(totalUnits);

        // Set up the video writer input
        var videoWriterInput = new AVAssetWriterInput(AVMediaType.Video, getVideoWriterSettings(newBitrate, newWidth, newHeight));
        videoWriterInput.ExpectsMediaDataInRealTime = true;
        videoWriterInput.Transform = videoTrack.PreferredTransform;

        NSError nSError;
        var videoWriter = new AVAssetWriter(destination, AVFileType.QuickTimeMovie, out nSError);
        videoWriter.AddInput(videoWriterInput);

        // 875704438 is the FourCC '420v' (kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
        var videoReaderSettings = new NSDictionary(
            "PixelFormatType", new NSNumber(875704438)
        );
        var videoReaderOutput = new AVAssetReaderTrackOutput(videoTrack, videoReaderSettings);

        AVAssetReader videoReader;
        try {
            videoReader = new AVAssetReader(videoAsset, out nSError);
            videoReader.AddOutput(videoReaderOutput);
        } catch (Exception readerError) {
            Console.WriteLine("video reader error: " + readerError.Message);
            // TODO: raise an error event
        }

        // TODO: work out how to pass nil for the output settings
        var audioSettings = new AudioSettings() {
            //EncoderBitRate = 64000,
            //Format = AudioToolbox.AudioFormatType.,
            NumberChannels = 1,
            //SampleRate = 44100
        };

        var audioWriterInput = new AVAssetWriterInput(AVMediaType.Audio, audioSettings);
        audioWriterInput.ExpectsMediaDataInRealTime = false;
        videoWriter.AddInput(audioWriterInput);

        // Set up the audio reader
        var audioTrack = videoAsset.Tracks.FirstOrDefault(x => x.MediaType == AVMediaType.Audio);
        var audioReaderOutput = new AVAssetReaderTrackOutput(audioTrack, audioSettings);

        var audioReader = new AVAssetReader(videoAsset, out nSError);
        audioReader.AddOutput(audioReaderOutput);

        videoWriter.StartWriting();
    } catch (Exception ex) {
        // TODO: add error handling
        return new Compression();
    }

    return new Compression();
}
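// The getBitrate and generateWidthAndHeight helpers referenced above are not included in the snippet.
// A hedged sketch of plausible implementations follows; the scaling factors, the 1280-pixel cap, and
// the assumption that VideoQuality also defines low and medium values are all illustrative only.
float getBitrate(float bitrate, VideoQuality quality)
{
    switch (quality) {
    case VideoQuality.low:
        return bitrate * 0.1f;
    case VideoQuality.medium:
        return bitrate * 0.3f;
    default:
        return bitrate * 0.5f; // high: still below the source bitrate
    }
}

CGSize generateWidthAndHeight(nfloat width, nfloat height, bool keepOriginalResolution)
{
    // Cap the longer edge at 1280 unless the caller wants the original resolution.
    var longerEdge = Math.Max((double)width, (double)height);
    if (keepOriginalResolution || longerEdge <= 1280)
        return new CGSize(width, height);

    var scale = 1280 / longerEdge;
    return new CGSize((nfloat)((double)width * scale), (nfloat)((double)height * scale));
}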
public bool SetupAssetWriterVideoInput(CMFormatDescription currentFormatDescription)
{
    //Console.WriteLine ("Setting up Video Asset Writer");
    float bitsPerPixel;
    var dimensions = currentFormatDescription.VideoDimensions;
    int numPixels = dimensions.Width * dimensions.Height;
    int bitsPerSecond;

    // Assume that lower-than-SD resolutions are intended for streaming, and use a lower bitrate
    bitsPerPixel = numPixels < (640 * 480) ? 4.05F : 11.4F;
    bitsPerSecond = (int)(numPixels * bitsPerPixel);

    NSDictionary videoCompressionSettings = NSDictionary.FromObjectsAndKeys(
        new NSObject[] {
            // The compression settings values
            AVVideo.CodecH264,
            NSNumber.FromInt32(dimensions.Width),
            NSNumber.FromInt32(dimensions.Height),
            NSDictionary.FromObjectsAndKeys(
                new object[] {
                    // Compression property values
                    NSNumber.FromInt32(bitsPerSecond),
                    NSNumber.FromInt32(30)
                },
                new object[] {
                    // Compression property keys
                    AVVideo.AverageBitRateKey,
                    AVVideo.MaxKeyFrameIntervalKey
                })
        },
        new NSObject[] {
            // The compression settings keys
            AVVideo.CodecKey,
            AVVideo.WidthKey,
            AVVideo.HeightKey,
            AVVideo.CompressionPropertiesKey
        }
    );

    if (assetWriter.CanApplyOutputSettings(videoCompressionSettings, AVMediaType.Video)) {
        assetWriterVideoIn = new AVAssetWriterInput(AVMediaType.Video, videoCompressionSettings);
        assetWriterVideoIn.ExpectsMediaDataInRealTime = true;
        assetWriterVideoIn.Transform = TransformFromCurrentVideoOrientationToOrientation(ReferenceOrientation);

        if (assetWriter.CanAddInput(assetWriterVideoIn)) {
            assetWriter.AddInput(assetWriterVideoIn);
        } else {
            Console.WriteLine("Couldn't add asset writer video input.");
            return false;
        }
    } else {
        Console.WriteLine("Couldn't apply video output settings.");
    }

    return true;
}
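// Once the video input above has been added, sample buffers from the capture pipeline can be appended
// to it. This is a hedged sketch of that step (the assetWriter/assetWriterVideoIn names follow the
// snippet above; the caller that supplies the sample buffer is an assumption).
void WriteVideoSampleBuffer(CMSampleBuffer sampleBuffer)
{
    // Start the writer session on the first buffer, timestamped against the buffer's presentation time.
    if (assetWriter.Status == AVAssetWriterStatus.Unknown) {
        if (assetWriter.StartWriting())
            assetWriter.StartSessionAtSourceTime(sampleBuffer.PresentationTimeStamp);
        else
            Console.WriteLine("Error starting writing: " + assetWriter.Error);
    }

    if (assetWriter.Status == AVAssetWriterStatus.Writing && assetWriterVideoIn.ReadyForMoreMediaData) {
        if (!assetWriterVideoIn.AppendSampleBuffer(sampleBuffer))
            Console.WriteLine("Error appending video sample buffer: " + assetWriter.Error);
    }
}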