// Wires the given audio track into the asset reader/writer pair and
// creates the channel object that will shuttle sample buffers between them.
// A null track is silently ignored (the asset simply has no audio).
void SetupAssetReaderWriterForAudio(AVAssetTrack audioTrack)
{
    if (audioTrack == null)
        return;

    // Passing a null settings object tells the reader to vend the media data
    // in the format in which it was stored, i.e. decompressed Linear PCM.
    var readerOutput = AVAssetReaderTrackOutput.Create(audioTrack, (AudioSettings)null);
    if (assetReader.CanAddOutput(readerOutput))
        assetReader.AddOutput(readerOutput);

    // Null settings on the writer side likewise pass the samples through unmodified.
    var writerInput = AVAssetWriterInput.Create(audioTrack.MediaType, (AudioSettings)null);
    if (assetWriter.CanAddInput(writerInput))
        assetWriter.AddInput(writerInput);

    // The channel coordinates the work of reading from the output
    // and writing to the input on background queues.
    audioSampleBufferChannel = new AudioChannel(readerOutput, writerInput);
}
// Reads compressed video sample buffers from the asset and feeds them to the
// decompression session. Backpressure: when more than 5 decoded frames are
// pending, the loop blocks on bufferSemaphore until the consumer catches up.
// On reaching the end of the asset the method restarts itself to loop playback.
void ReadSampleBuffers(AVAsset asset)
{
    NSError error;
    assetReader = AVAssetReader.FromAsset(asset, out error);
    if (error != null) {
        Console.WriteLine("Error creating Asset Reader: {0}", error.Description);
        // Bug fix: the reader is unusable here; continuing would dereference
        // a broken/null assetReader further down.
        return;
    }

    AVAssetTrack[] videoTracks = asset.TracksWithMediaType(AVMediaType.Video);
    if (videoTracks.Length == 0) {
        // Bug fix: videoTracks[0] would throw IndexOutOfRangeException for
        // audio-only assets.
        Console.WriteLine("Asset contains no video tracks.");
        return;
    }
    AVAssetTrack videoTrack = videoTracks[0];
    CreateDecompressionSession(videoTrack);

    // Null settings: receive the samples in their stored (compressed) format,
    // since decoding is done explicitly via the VT decompression session.
    var videoTrackOutput = AVAssetReaderTrackOutput.Create(videoTrack, (AVVideoSettingsUncompressed)null);
    if (assetReader.CanAddOutput(videoTrackOutput)) {
        assetReader.AddOutput(videoTrackOutput);
    }
    if (!assetReader.StartReading()) {
        return;
    }

    while (assetReader.Status == AVAssetReaderStatus.Reading) {
        CMSampleBuffer sampleBuffer = videoTrackOutput.CopyNextSampleBuffer();
        if (sampleBuffer != null) {
            VTDecodeFrameFlags flags = VTDecodeFrameFlags.EnableAsynchronousDecompression;
            VTDecodeInfoFlags flagOut;
            decompressionSession.DecodeFrame(sampleBuffer, flags, IntPtr.Zero, out flagOut);
            sampleBuffer.Dispose();
            // Throttle: wait while the presentation queue is full.
            if (presentationTimes.Count >= 5) {
                bufferSemaphore.Wait();
            }
        } else if (assetReader.Status == AVAssetReaderStatus.Failed) {
            Console.WriteLine("Asset Reader failed with error: {0}", assetReader.Error.Description);
        } else if (assetReader.Status == AVAssetReaderStatus.Completed) {
            Console.WriteLine("Reached the end of the video.");
            ChangeStatus();
            // Restart reading from the top to loop the video.
            ReadSampleBuffers(asset);
        }
    }
}
// Wires the given audio track into the asset reader/writer pair and creates
// the sample-buffer channel that coordinates the transfer. Null tracks are
// ignored (asset has no audio).
private void SetupAssetReaderWriterForAudio(AVAssetTrack audioTrack)
{
    if (audioTrack == null) {
        return;
    }

    // Decompress to Linear PCM with the asset reader: passing null settings
    // vends the media data in the format in which it was stored.
    AVAssetReaderOutput output = AVAssetReaderTrackOutput.Create(audioTrack, (AudioSettings)null);
    // Consistency/robustness fix: guard with CanAddOutput/CanAddInput instead
    // of adding unconditionally, which throws InvalidOperationException when
    // the reader/writer cannot accept the connection.
    if (_assetReader.CanAddOutput(output)) {
        _assetReader.AddOutput(output);
    }

    AVAssetWriterInput input = AVAssetWriterInput.Create(audioTrack.MediaType, (AudioSettings)null);
    if (_assetWriter.CanAddInput(input)) {
        _assetWriter.AddInput(input);
    }

    // Create and save an instance of AAPLRWSampleBufferChannel,
    // which will coordinate the work of reading and writing sample buffers
    _audioSampleBufferChannel = new ReadWriteSampleBufferChannel(output, input, false);
}
// Compresses the video at `source` into `destination` using the requested
// quality/resolution settings. Returns a Compression describing the outcome.
// When isMinBitRateEnabled is true and the source bitrate is already at or
// below MIN_BITRATE, compression is skipped and an error result is returned.
public Compression CompressVideo(NSUrl source, NSUrl destination, bool isMinBitRateEnabled = true, VideoQuality quality = VideoQuality.high, bool keepOriginalResolution = false)
{
    var compressionOperation = new Compression();
    var videoAsset = new AVUrlAsset(source);
    try {
        var videoTrack = videoAsset.Tracks.First(x => x.MediaType == AVMediaType.Video);
        var bitrate = videoTrack.EstimatedDataRate;

        // Check for a min video bitrate before compression.
        if (isMinBitRateEnabled && bitrate <= MIN_BITRATE) {
            var error = new Compression();
            error.title = "The provided bitrate is smaller than what is needed for compression try to set isMinBitRateEnabled to false";
            // TODO: raise an error event to the caller.
            return error;
        }

        var newBitrate = getBitrate(bitrate, quality);

        // Handle new width and height values.
        var videoSize = videoTrack.NaturalSize;
        var size = generateWidthAndHeight(videoSize.Width, videoSize.Height, keepOriginalResolution);
        var newWidth = size.Width;
        var newHeight = size.Height;

        // Total frames (used for progress reporting).
        var durationInSeconds = videoAsset.Duration.Seconds;
        var frameRate = videoTrack.NominalFrameRate;
        var totalFrames = Math.Ceiling(durationInSeconds * (double)(frameRate));
        var totalUnits = Convert.ToInt64(totalFrames);
        //var progress = NSProgress(totalUnits);

        // Setup video writer input.
        var videoWriterInput = new AVAssetWriterInput(AVMediaType.Video, getVideoWriterSettings(newBitrate, newWidth, newHeight));
        videoWriterInput.ExpectsMediaDataInRealTime = true;
        videoWriterInput.Transform = videoTrack.PreferredTransform;

        NSError nSError;
        var videoWriter = new AVAssetWriter(destination, AVFileType.QuickTimeMovie, out nSError);
        if (nSError != null) {
            // Bug fix: the writer creation error was never checked.
            Console.WriteLine("Error creating asset writer: {0}", nSError.Description);
            return new Compression();
        }
        videoWriter.AddInput(videoWriterInput);

        // 875704438 == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange ('420v').
        var videoReaderSettings = new NSDictionary(
            "PixelFormatType", new NSNumber(875704438)
        );
        var videoReaderOutput = new AVAssetReaderTrackOutput(videoTrack, videoReaderSettings);

        AVAssetReader videoReader;
        try {
            videoReader = new AVAssetReader(videoAsset, out nSError);
            videoReader.AddOutput(videoReaderOutput);
        } catch (Exception ex) {
            // Bug fix: the original message was an unported Swift
            // interpolation ("(error)") and the exception was discarded.
            Console.WriteLine("video reader error: {0}", ex);
            // TODO: raise an error event to the caller.
        }

        // TODO: verify how to pass a nil settings dictionary here.
        var audioSettings = new AudioSettings() {
            //EncoderBitRate = 64000,
            //Format = AudioToolbox.AudioFormatType.,
            NumberChannels = 1,
            //SampleRate = 44100
        };
        var audioWriterInput = new AVAssetWriterInput(AVMediaType.Audio, audioSettings);
        audioWriterInput.ExpectsMediaDataInRealTime = false;
        videoWriter.AddInput(audioWriterInput);

        // Setup audio reader. Bug fix: FirstOrDefault returns null for videos
        // without an audio track; the original dereferenced it unconditionally.
        var audioTrack = videoAsset.Tracks.FirstOrDefault(x => x.MediaType == AVMediaType.Audio);
        if (audioTrack != null) {
            var audioReaderOutput = new AVAssetReaderTrackOutput(audioTrack, audioSettings);
            var audioReader = new AVAssetReader(videoAsset, out nSError);
            audioReader.AddOutput(audioReaderOutput);
        }

        videoWriter.StartWriting();
    } catch (Exception ex) {
        // Bug fix: the caught exception was silently discarded; log it so
        // failures are diagnosable. TODO: surface a proper error result.
        Console.WriteLine("CompressVideo failed: {0}", ex);
        return new Compression();
    }

    return new Compression();
}
// Reads compressed video sample buffers from the asset and submits them to
// the VideoToolbox decompression session. Blocks on bufferSemaphore when 5 or
// more decoded frames are pending, and restarts itself at end-of-asset so the
// video loops.
void ReadSampleBuffers(AVAsset asset)
{
    NSError error;
    assetReader = AVAssetReader.FromAsset (asset, out error);
    if (error != null) {
        Console.WriteLine ("Error creating Asset Reader: {0}", error.Description);
        // Bug fix: the reader is unusable after a creation error; the original
        // logged and continued, dereferencing the broken reader below.
        return;
    }

    AVAssetTrack[] videoTracks = asset.TracksWithMediaType (AVMediaType.Video);
    if (videoTracks.Length == 0) {
        // Bug fix: indexing [0] throws for assets with no video track.
        Console.WriteLine ("Asset contains no video tracks.");
        return;
    }
    AVAssetTrack videoTrack = videoTracks [0];
    CreateDecompressionSession (videoTrack);

    // Null settings: vend samples in their stored (compressed) format; the
    // decompression session performs the actual decode.
    var videoTrackOutput = AVAssetReaderTrackOutput.Create (videoTrack, (AVVideoSettingsUncompressed)null);
    if (assetReader.CanAddOutput (videoTrackOutput))
        assetReader.AddOutput (videoTrackOutput);
    if (!assetReader.StartReading ())
        return;

    while (assetReader.Status == AVAssetReaderStatus.Reading) {
        CMSampleBuffer sampleBuffer = videoTrackOutput.CopyNextSampleBuffer ();
        if (sampleBuffer != null) {
            VTDecodeFrameFlags flags = VTDecodeFrameFlags.EnableAsynchronousDecompression;
            VTDecodeInfoFlags flagOut;
            decompressionSession.DecodeFrame (sampleBuffer, flags, IntPtr.Zero, out flagOut);
            sampleBuffer.Dispose ();
            // Throttle: wait while the presentation queue is full.
            if (presentationTimes.Count >= 5)
                bufferSemaphore.Wait ();
        } else if (assetReader.Status == AVAssetReaderStatus.Failed) {
            Console.WriteLine ("Asset Reader failed with error: {0}", assetReader.Error.Description);
        } else if (assetReader.Status == AVAssetReaderStatus.Completed) {
            Console.WriteLine("Reached the end of the video.");
            ChangeStatus ();
            // Restart reading from the top to loop playback.
            ReadSampleBuffers (asset);
        }
    }
}