// Wires the audio leg of the transcode pipeline: a pass-through reader output
// and writer input joined by an AudioChannel. No-op when the track is null.
void SetupAssetReaderWriterForAudio(AVAssetTrack audioTrack)
{
    if (audioTrack == null)
        return;

    // Passing null settings asks the reader to vend the track's media data
    // in the format in which it was stored (no decompression is forced here).
    AVAssetReaderOutput readerOutput = AVAssetReaderTrackOutput.Create(audioTrack, (AudioSettings)null);
    if (assetReader.CanAddOutput(readerOutput))
        assetReader.AddOutput(readerOutput);

    // Likewise, null settings tell the writer to take samples as given.
    AVAssetWriterInput writerInput = AVAssetWriterInput.Create(audioTrack.MediaType, (AudioSettings)null);
    if (assetWriter.CanAddInput(writerInput))
        assetWriter.AddInput(writerInput);

    // The channel coordinates the work of reading sample buffers from the
    // reader output and appending them to the writer input.
    audioSampleBufferChannel = new AudioChannel(readerOutput, writerInput);
}
// Recreates the asset reader and video track output, attaches the output, and
// kicks off reading. Returns true only when every step succeeds.
public bool RestartReading()
{
    this.assetReader = AVAssetReader.FromAsset(this.videoAsset, out NSError error);
    if (error != null)
    {
        Console.WriteLine($"Failed to create AVAssetReader object: {error}");
        return false;
    }

    // Decode frames to bi-planar full-range 4:2:0 YpCbCr.
    var outputSettings = new AVVideoSettingsUncompressed
    {
        PixelFormatType = CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange
    };
    this.videoAssetReaderOutput = new AVAssetReaderTrackOutput(this.videoTrack, outputSettings);
    if (this.videoAssetReaderOutput == null)
        return false;

    // Copy sample data so buffers stay valid after the reader advances.
    this.videoAssetReaderOutput.AlwaysCopiesSampleData = true;

    if (!this.assetReader.CanAddOutput(this.videoAssetReaderOutput))
        return false;

    this.assetReader.AddOutput(this.videoAssetReaderOutput);
    return this.assetReader.StartReading();
}
// Pulls compressed video sample buffers from the asset and feeds them to the
// VideoToolbox decompression session, throttling via bufferSemaphore so at most
// ~5 decoded frames are queued. Restarts itself on completion (loops playback).
void ReadSampleBuffers(AVAsset asset)
{
    NSError error;
    assetReader = AVAssetReader.FromAsset(asset, out error);
    if (error != null)
    {
        // Bail out early: the original fell through here and dereferenced an
        // invalid reader on the next line.
        Console.WriteLine("Error creating Asset Reader: {0}", error.Description);
        return;
    }

    AVAssetTrack[] videoTracks = asset.TracksWithMediaType(AVMediaType.Video);
    if (videoTracks.Length == 0)
    {
        // Guard: an audio-only asset would otherwise throw on videoTracks[0].
        Console.WriteLine("Asset contains no video track.");
        return;
    }
    AVAssetTrack videoTrack = videoTracks[0];
    CreateDecompressionSession(videoTrack);

    // Null settings: read the track in its stored (compressed) format.
    var videoTrackOutput = AVAssetReaderTrackOutput.Create(videoTrack, (AVVideoSettingsUncompressed)null);
    if (assetReader.CanAddOutput(videoTrackOutput))
    {
        assetReader.AddOutput(videoTrackOutput);
    }
    if (!assetReader.StartReading())
    {
        return;
    }

    while (assetReader.Status == AVAssetReaderStatus.Reading)
    {
        CMSampleBuffer sampleBuffer = videoTrackOutput.CopyNextSampleBuffer();
        if (sampleBuffer != null)
        {
            VTDecodeFrameFlags flags = VTDecodeFrameFlags.EnableAsynchronousDecompression;
            VTDecodeInfoFlags flagOut;
            decompressionSession.DecodeFrame(sampleBuffer, flags, IntPtr.Zero, out flagOut);
            sampleBuffer.Dispose();
            // Throttle: block once enough frames are queued; the consumer is
            // expected to release bufferSemaphore as frames are drained.
            if (presentationTimes.Count >= 5)
            {
                bufferSemaphore.Wait();
            }
        }
        else if (assetReader.Status == AVAssetReaderStatus.Failed)
        {
            Console.WriteLine("Asset Reader failed with error: {0}", assetReader.Error.Description);
        }
        else if (assetReader.Status == AVAssetReaderStatus.Completed)
        {
            Console.WriteLine("Reached the end of the video.");
            ChangeStatus();
            // Loop: restart reading from the beginning of the asset.
            ReadSampleBuffers(asset);
        }
    }
}
// Wires the video leg of the transcode pipeline: decompress to BGRA on the
// read side, re-encode to H.264 at the source dimensions on the write side,
// pairing the endpoints in a VideoChannel. No-op when the track is null.
void SetupAssetReaserWriterForVideo(AVAssetTrack videoTrack)
{
    if (videoTrack == null)
        return;

    // Ask the reader to decompress frames to 32-bit BGRA.
    var readerSettings = new AVVideoSettingsUncompressed
    {
        PixelFormatType = CVPixelFormatType.CV32BGRA,
        AllocateWithIOSurface = null
    };
    AVAssetReaderOutput readerOutput = new AVAssetReaderTrackOutput(videoTrack, readerSettings);
    assetReader.AddOutput(readerOutput);

    // Use the track's format description for stream attributes we keep as-is.
    var formatDescription = (CMVideoFormatDescription)videoTrack.FormatDescriptions.FirstOrDefault();

    // Dimensions come from the format description when present, else NaturalSize.
    CGSize trackDimensions = formatDescription == null
        ? videoTrack.NaturalSize
        : formatDescription.GetPresentationDimensions(false, false);

    // Carry over clean aperture and pixel aspect ratio, when available.
    AVVideoCodecSettings compressionSettings = null;
    if (formatDescription != null)
    {
        var cleanApertureDescr = (NSDictionary)formatDescription.GetExtension(CVImageBuffer.CleanApertureKey);
        var pixelAspectRatioDescr = (NSDictionary)formatDescription.GetExtension(CVImageBuffer.PixelAspectRatioKey);
        compressionSettings = CreateCodecSettingsFor(cleanApertureDescr, pixelAspectRatioDescr);
    }

    // Compress to H.264 with the asset writer, keeping the source dimensions.
    var writerSettings = new AVVideoSettingsCompressed
    {
        Codec = AVVideoCodec.H264,
        Width = (int)trackDimensions.Width,
        Height = (int)trackDimensions.Height,
        CodecSettings = compressionSettings
    };
    AVAssetWriterInput writerInput = new AVAssetWriterInput(videoTrack.MediaType, writerSettings);
    // Preserve the source track's orientation in the output file.
    writerInput.Transform = videoTrack.PreferredTransform;
    assetWriter.AddInput(writerInput);

    // The channel coordinates the work of reading and writing sample buffers.
    videoSampleBufferChannel = new VideoChannel(readerOutput, writerInput, transformer);
}
// Wires the audio leg of the transcode pipeline: a pass-through reader output
// and writer input joined by a ReadWriteSampleBufferChannel. No-op when the
// track is null.
private void SetupAssetReaderWriterForAudio(AVAssetTrack audioTrack)
{
    if (audioTrack == null)
        return;

    // Null settings: the reader vends the track's media data as stored and the
    // writer takes samples as given — no transcoding is configured here.
    AVAssetReaderOutput readerOutput = AVAssetReaderTrackOutput.Create(audioTrack, (AudioSettings)null);
    _assetReader.AddOutput(readerOutput);

    AVAssetWriterInput writerInput = AVAssetWriterInput.Create(audioTrack.MediaType, (AudioSettings)null);
    _assetWriter.AddInput(writerInput);

    // The channel coordinates the work of reading and writing sample buffers.
    // NOTE(review): the trailing 'false' flag presumably distinguishes this
    // from the video channel — confirm against ReadWriteSampleBufferChannel.
    _audioSampleBufferChannel = new ReadWriteSampleBufferChannel(readerOutput, writerInput, false);
}
/// <summary>
/// Re-encodes the video at <paramref name="source"/> into <paramref name="destination"/>
/// at a reduced bitrate and (optionally) resolution.
/// NOTE(review): this implementation is visibly unfinished — it wires up the
/// readers/writers and calls StartWriting, but never pumps sample buffers,
/// reports progress, or finishes the writer session; the TODOs mark missing
/// error events. On any failure it currently returns an empty Compression.
/// </summary>
/// <param name="source">URL of the input video file.</param>
/// <param name="destination">URL the compressed QuickTime movie is written to.</param>
/// <param name="isMinBitRateEnabled">When true, refuse inputs already at/below MIN_BITRATE.</param>
/// <param name="quality">Target quality used to derive the new bitrate.</param>
/// <param name="keepOriginalResolution">When true, do not downscale the frame size.</param>
public Compression CompressVideo(NSUrl source, NSUrl destination, bool isMinBitRateEnabled = true, VideoQuality quality = VideoQuality.high, bool keepOriginalResolution = false)
{
    var videoAsset = new AVUrlAsset(source);
    try
    {
        var videoTrack = videoAsset.Tracks.First(x => x.MediaType == AVMediaType.Video);
        var bitrate = videoTrack.EstimatedDataRate;

        // Check for a minimum video bitrate before compression.
        if (isMinBitRateEnabled && bitrate <= MIN_BITRATE)
        {
            var error = new Compression();
            error.title = "The provided bitrate is smaller than what is needed for compression try to set isMinBitRateEnabled to false";
            // TODO: raise an error event to the caller
            return error;
        }

        var newBitrate = getBitrate(bitrate, quality);

        // Handle new width and height values.
        var videoSize = videoTrack.NaturalSize;
        var size = generateWidthAndHeight(videoSize.Width, videoSize.Height, keepOriginalResolution);
        var newWidth = size.Width;
        var newHeight = size.Height;

        // Total frames — kept for the progress reporting that is still TODO.
        var durationInSeconds = videoAsset.Duration.Seconds;
        var frameRate = videoTrack.NominalFrameRate;
        var totalFrames = Math.Ceiling(durationInSeconds * (double)frameRate);
        var totalUnits = Convert.ToInt64(totalFrames);
        //var progress = NSProgress(totalUnits);

        // Set up the video writer input.
        var videoWriterInput = new AVAssetWriterInput(AVMediaType.Video, getVideoWriterSettings(newBitrate, newWidth, newHeight));
        videoWriterInput.ExpectsMediaDataInRealTime = true;
        videoWriterInput.Transform = videoTrack.PreferredTransform;

        NSError nSError;
        var videoWriter = new AVAssetWriter(destination, AVFileType.QuickTimeMovie, out nSError);
        videoWriter.AddInput(videoWriterInput);

        // 875704438 == FourCC '420v' (kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange).
        // NOTE(review): the key should likely be the native
        // kCVPixelBufferPixelFormatTypeKey (CVPixelBuffer.PixelFormatTypeKey),
        // not the literal string "PixelFormatType" — confirm against CoreVideo.
        var videoReaderSettings = new NSDictionary(
            "PixelFormatType",
            new NSNumber(875704438)
        );
        var videoReaderOutput = new AVAssetReaderTrackOutput(videoTrack, videoReaderSettings);
        AVAssetReader videoReader;
        try
        {
            videoReader = new AVAssetReader(videoAsset, out nSError);
            videoReader.AddOutput(videoReaderOutput);
        }
        catch (Exception readerEx)
        {
            // Was an empty catch printing the literal text "(error)"; include
            // the actual exception so failures are diagnosable.
            Console.WriteLine("video reader error: {0}", readerEx);
            // TODO: raise an error event to the caller
        }

        // TODO: check how to pass a nil settings dictionary instead
        var audioSettings = new AudioSettings()
        {
            //EncoderBitRate = 64000,
            //Format = AudioToolbox.AudioFormatType.,
            NumberChannels = 1,
            //SampleRate = 44100
        };

        var audioWriterInput = new AVAssetWriterInput(AVMediaType.Audio, audioSettings);
        audioWriterInput.ExpectsMediaDataInRealTime = false;
        videoWriter.AddInput(audioWriterInput);

        // Set up the audio reader.
        // NOTE(review): audioTrack is null for videos without an audio track,
        // which throws below and lands in the outer catch — confirm whether
        // audio-less sources should skip this section instead of failing.
        var audioTrack = videoAsset.Tracks.FirstOrDefault(x => x.MediaType == AVMediaType.Audio);
        var audioReaderOutput = new AVAssetReaderTrackOutput(audioTrack, audioSettings);
        var audioReader = new AVAssetReader(videoAsset, out nSError);
        audioReader.AddOutput(audioReaderOutput);

        videoWriter.StartWriting();
    }
    catch (Exception ex)
    {
        // Was swallowed silently; at least log before returning.
        // TODO: proper error handling / error event
        Console.WriteLine("CompressVideo failed: {0}", ex);
        return new Compression();
    }

    return new Compression();
}
// Configures the video half of the transcode pipeline on assetReader/assetWriter:
// decompress to BGRA on the read side, re-encode to H.264 on the write side, and
// pair the two endpoints in a VideoChannel. No-op for a null track.
void SetupAssetReaserWriterForVideo (AVAssetTrack videoTrack)
{
    if (videoTrack == null)
        return;

    // Decompress to ARGB with the asset reader
    var decompSettings = new AVVideoSettingsUncompressed {
        PixelFormatType = CVPixelFormatType.CV32BGRA,
        AllocateWithIOSurface = null
    };
    AVAssetReaderOutput output = new AVAssetReaderTrackOutput (videoTrack, decompSettings);
    assetReader.AddOutput (output);

    // Get the format description of the track, to fill in attributes of the video stream that we don't want to change
    var formatDescription = (CMVideoFormatDescription)videoTrack.FormatDescriptions.FirstOrDefault ();

    // Grab track dimensions from format description; fall back to the track's
    // NaturalSize when no format description is available
    CGSize trackDimensions = formatDescription != null
        ? formatDescription.GetPresentationDimensions (false, false)
        : videoTrack.NaturalSize;

    // Grab clean aperture, pixel aspect ratio from format description
    AVVideoCodecSettings compressionSettings = null;
    if (formatDescription != null) {
        var cleanApertureDescr = (NSDictionary)formatDescription.GetExtension (CVImageBuffer.CleanApertureKey);
        var pixelAspectRatioDescr = (NSDictionary)formatDescription.GetExtension (CVImageBuffer.PixelAspectRatioKey);
        compressionSettings = CreateCodecSettingsFor (cleanApertureDescr, pixelAspectRatioDescr);
    }

    // Compress to H.264 with the asset writer, keeping the source dimensions
    var videoSettings = new AVVideoSettingsCompressed {
        Codec = AVVideoCodec.H264,
        Width = (int)trackDimensions.Width,
        Height = (int)trackDimensions.Height,
        CodecSettings = compressionSettings
    };
    AVAssetWriterInput input = new AVAssetWriterInput (videoTrack.MediaType, videoSettings);
    // Preserve the source track's orientation in the output file
    input.Transform = videoTrack.PreferredTransform;
    assetWriter.AddInput (input);

    // Create and save an instance of ReadWriteSampleBufferChannel,
    // which will coordinate the work of reading and writing sample buffers
    videoSampleBufferChannel = new VideoChannel (output, input, transformer);
}