// Wires the shared asset reader/writer pair up to the given audio track:
// a track output is registered on assetReader and a matching input on
// assetWriter, then both ends are handed to an AudioChannel that will
// shuttle sample buffers between them.
//
// audioTrack: the source audio track; a null track is silently ignored.
// Passing a null settings object to both the reader output and the writer
// input means the media data is read and written in its stored format.
void SetupAssetReaderWriterForAudio(AVAssetTrack audioTrack)
{
	if (audioTrack == null)
		return;

	// Reader side: pull sample buffers straight from the track.
	var readerOutput = AVAssetReaderTrackOutput.Create(audioTrack, (AudioSettings)null);
	if (assetReader.CanAddOutput(readerOutput))
		assetReader.AddOutput(readerOutput);

	// Writer side: accept buffers of the same media type, unmodified.
	var writerInput = AVAssetWriterInput.Create(audioTrack.MediaType, (AudioSettings)null);
	if (assetWriter.CanAddInput(writerInput))
		assetWriter.AddInput(writerInput);

	// The channel coordinates reading from readerOutput and writing to writerInput.
	audioSampleBufferChannel = new AudioChannel(readerOutput, writerInput);
}
// Reads every video sample buffer from the asset and feeds it to the
// decompression session for asynchronous decoding. When the reader reports
// Completed, playback is restarted by calling this method again (looping).
//
// asset: the asset whose first video track is decoded. Backpressure:
// once 5 presentation times are queued, we block on bufferSemaphore
// until the decode callback drains the queue.
void ReadSampleBuffers(AVAsset asset)
{
	NSError error;
	assetReader = AVAssetReader.FromAsset(asset, out error);
	if (error != null) {
		Console.WriteLine("Error creating Asset Reader: {0}", error.Description);
		// The reader is unusable after a creation error; bail out instead of
		// dereferencing it below (previously caused a NullReferenceException).
		return;
	}

	AVAssetTrack[] videoTracks = asset.TracksWithMediaType(AVMediaType.Video);
	if (videoTracks.Length == 0) {
		// Guard: indexing [0] on an asset with no video track would throw.
		Console.WriteLine("No video track found in asset.");
		return;
	}
	AVAssetTrack videoTrack = videoTracks[0];
	CreateDecompressionSession(videoTrack);

	// Null settings: receive the samples in their stored (compressed) format.
	var videoTrackOutput = AVAssetReaderTrackOutput.Create(videoTrack, (AVVideoSettingsUncompressed)null);
	if (assetReader.CanAddOutput(videoTrackOutput))
		assetReader.AddOutput(videoTrackOutput);

	if (!assetReader.StartReading())
		return;

	while (assetReader.Status == AVAssetReaderStatus.Reading) {
		CMSampleBuffer sampleBuffer = videoTrackOutput.CopyNextSampleBuffer();
		if (sampleBuffer != null) {
			VTDecodeFrameFlags flags = VTDecodeFrameFlags.EnableAsynchronousDecompression;
			VTDecodeInfoFlags flagOut;
			decompressionSession.DecodeFrame(sampleBuffer, flags, IntPtr.Zero, out flagOut);
			sampleBuffer.Dispose();

			// Backpressure: don't let decode requests outrun presentation.
			if (presentationTimes.Count >= 5)
				bufferSemaphore.Wait();
		} else if (assetReader.Status == AVAssetReaderStatus.Failed) {
			Console.WriteLine("Asset Reader failed with error: {0}", assetReader.Error.Description);
		} else if (assetReader.Status == AVAssetReaderStatus.Completed) {
			Console.WriteLine("Reached the end of the video.");
			ChangeStatus();
			ReadSampleBuffers(asset);
			// Return so this (now stale) loop doesn't keep running on top of
			// the restarted read — previously each loop nested a stack frame.
			return;
		}
	}
}
// Reads every video sample buffer from the asset and feeds it to the
// decompression session for asynchronous decoding. When the reader reports
// Completed, playback is restarted by calling this method again (looping).
//
// asset: the asset whose first video track is decoded. Backpressure:
// once 5 presentation times are queued, we block on bufferSemaphore
// until the decode callback drains the queue.
void ReadSampleBuffers(AVAsset asset)
{
	NSError error;
	assetReader = AVAssetReader.FromAsset (asset, out error);
	if (error != null) {
		Console.WriteLine ("Error creating Asset Reader: {0}", error.Description);
		// The reader is unusable after a creation error; bail out instead of
		// dereferencing it below (previously caused a NullReferenceException).
		return;
	}

	AVAssetTrack[] videoTracks = asset.TracksWithMediaType (AVMediaType.Video);
	if (videoTracks.Length == 0) {
		// Guard: indexing [0] on an asset with no video track would throw.
		Console.WriteLine ("No video track found in asset.");
		return;
	}
	AVAssetTrack videoTrack = videoTracks [0];
	CreateDecompressionSession (videoTrack);

	// Null settings: receive the samples in their stored (compressed) format.
	var videoTrackOutput = AVAssetReaderTrackOutput.Create (videoTrack, (AVVideoSettingsUncompressed)null);
	if (assetReader.CanAddOutput (videoTrackOutput))
		assetReader.AddOutput (videoTrackOutput);

	if (!assetReader.StartReading ())
		return;

	while (assetReader.Status == AVAssetReaderStatus.Reading) {
		CMSampleBuffer sampleBuffer = videoTrackOutput.CopyNextSampleBuffer ();
		if (sampleBuffer != null) {
			VTDecodeFrameFlags flags = VTDecodeFrameFlags.EnableAsynchronousDecompression;
			VTDecodeInfoFlags flagOut;
			decompressionSession.DecodeFrame (sampleBuffer, flags, IntPtr.Zero, out flagOut);
			sampleBuffer.Dispose ();

			// Backpressure: don't let decode requests outrun presentation.
			if (presentationTimes.Count >= 5)
				bufferSemaphore.Wait ();
		} else if (assetReader.Status == AVAssetReaderStatus.Failed) {
			Console.WriteLine ("Asset Reader failed with error: {0}", assetReader.Error.Description);
		} else if (assetReader.Status == AVAssetReaderStatus.Completed) {
			Console.WriteLine("Reached the end of the video.");
			ChangeStatus ();
			ReadSampleBuffers (asset);
			// Return so this (now stale) loop doesn't keep running on top of
			// the restarted read — previously each loop nested a stack frame.
			return;
		}
	}
}