void SetupAssetReaderWriterForAudio (AVAssetTrack audioTrack)
{
    if (audioTrack == null)
        return;

    // Decompress to Linear PCM with the asset reader.
    // To read the media data from a specific asset track in the format in which it was stored,
    // pass null to the settings parameter.
    AVAssetReaderOutput output = AVAssetReaderTrackOutput.Create (audioTrack, (AudioSettings) null);
    if (assetReader.CanAddOutput (output))
        assetReader.AddOutput (output);

    AVAssetWriterInput input = AVAssetWriterInput.Create (audioTrack.MediaType, (AudioSettings) null);
    if (assetWriter.CanAddInput (input))
        assetWriter.AddInput (input);

    // Create and save an instance of ReadWriteSampleBufferChannel,
    // which will coordinate the work of reading and writing sample buffers.
    audioSampleBufferChannel = new AudioChannel (output, input);
}
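For context, the assetReader and assetWriter fields used here must already exist before this method runs. A minimal sketch of how they could be created, assuming asset (an AVAsset) and outputUrl (an NSUrl) fields that are not part of this excerpt:

// Sketch (assumed context, not part of the snippet above): create the
// reader/writer pair that the Setup* methods add outputs and inputs to.
NSError error;
assetReader = AVAssetReader.FromAsset (asset, out error);
assetWriter = AVAssetWriter.FromUrl (outputUrl, AVFileType.QuickTimeMovie, out error);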
Task Start (ReadWriteSampleBufferChannel channel)
{
    if (channel == null)
        return Task.FromResult<object> (null);

    return channel.StartAsync ();
}
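A possible call site (an assumption; it is not shown in this excerpt): start both channels and await them together, letting Start absorb the case where a track, and therefore its channel, is missing:

// Either channel may be null for audio-only or video-only assets;
// Start turns that into an already-completed task.
await Task.WhenAll (
    Start (audioSampleBufferChannel),
    Start (videoSampleBufferChannel));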
void SetupAssetReaderWriterForVideo (AVAssetTrack videoTrack)
{
    if (videoTrack == null)
        return;

    // Decompress to 32-bit BGRA with the asset reader.
    var decompSettings = new AVVideoSettingsUncompressed {
        PixelFormatType = CVPixelFormatType.CV32BGRA,
        AllocateWithIOSurface = null
    };
    AVAssetReaderOutput output = new AVAssetReaderTrackOutput (videoTrack, decompSettings);
    assetReader.AddOutput (output);

    // Get the format description of the track, to fill in attributes of the
    // video stream that we don't want to change.
    var formatDescription = (CMVideoFormatDescription) videoTrack.FormatDescriptions.FirstOrDefault ();

    // Grab track dimensions from the format description.
    CGSize trackDimensions = formatDescription != null
        ? formatDescription.GetPresentationDimensions (false, false)
        : videoTrack.NaturalSize;

    // Grab clean aperture and pixel aspect ratio from the format description.
    AVVideoCodecSettings compressionSettings = null;
    if (formatDescription != null) {
        var cleanApertureDescr = (NSDictionary) formatDescription.GetExtension (CVImageBuffer.CleanApertureKey);
        var pixelAspectRatioDescr = (NSDictionary) formatDescription.GetExtension (CVImageBuffer.PixelAspectRatioKey);
        compressionSettings = CreateCodecSettingsFor (cleanApertureDescr, pixelAspectRatioDescr);
    }

    // Compress to H.264 with the asset writer.
    var videoSettings = new AVVideoSettingsCompressed {
        Codec = AVVideoCodec.H264,
        Width = (int) trackDimensions.Width,
        Height = (int) trackDimensions.Height,
        CodecSettings = compressionSettings
    };
    AVAssetWriterInput input = new AVAssetWriterInput (videoTrack.MediaType, videoSettings);
    input.Transform = videoTrack.PreferredTransform;
    assetWriter.AddInput (input);

    // Create and save an instance of ReadWriteSampleBufferChannel,
    // which will coordinate the work of reading and writing sample buffers.
    videoSampleBufferChannel = new VideoChannel (output, input, transformer);
}
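CreateCodecSettingsFor is referenced but not defined in this excerpt. A plausible sketch, assuming the Xamarin settings wrappers (AVVideoCleanApertureSettings, AVVideoPixelAspectRatioSettings) can be built from the dictionaries extracted above:

// Sketch (assumption): carry the source's clean aperture and pixel aspect
// ratio over into the H.264 compression settings, when they are present.
AVVideoCodecSettings CreateCodecSettingsFor (NSDictionary cleanAperture, NSDictionary pixelAspectRatio)
{
    if (cleanAperture == null && pixelAspectRatio == null)
        return null;

    return new AVVideoCodecSettings {
        VideoCleanAperture = cleanAperture != null
            ? new AVVideoCleanApertureSettings (cleanAperture)
            : null,
        PixelAspectRatio = pixelAspectRatio != null
            ? new AVVideoPixelAspectRatioSettings (pixelAspectRatio)
            : null
    };
}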
// TODO: find where this is called in the original sample:
// - (void)cancel:(id)sender
private Task StartReadingAsync (ReadWriteSampleBufferChannel channel, AVReaderWriter handler)
{
    var completionSrc = new TaskCompletionSource<object> ();
    if (channel == null)
        completionSrc.SetResult (null);
    else
        channel.StartWithAsync (completionSrc, handler);

    return completionSrc.Task;
}
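The TODO points at the Objective-C sample's cancel: action, which has no counterpart in this port. A minimal sketch of one, assuming it should trip the cancellationTokenSrc field that ReadingAndWritingDidFinish clears below:

// Sketch (assumption): propagate cancellation to the running channels via
// the CancellationTokenSource torn down in ReadingAndWritingDidFinish.
void Cancel ()
{
    if (cancellationTokenSrc != null)
        cancellationTokenSrc.Cancel ();
}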
public void DidReadSampleBuffer (ReadWriteSampleBufferChannel sampleBufferChannel, CMSampleBuffer sampleBuffer)
{
    // Calculate progress (on a 0.0 to 1.0 scale) and report it as a percentage.
    double progress = AVReaderWriter.ProgressOfSampleBufferInTimeRange (sampleBuffer, _timeRange);
    _progressProc ((float) progress * 100);

    // Grab the pixel buffer from the sample buffer, if possible.
    CVImageBuffer imageBuffer = sampleBuffer.GetImageBuffer ();
    var pixelBuffer = imageBuffer as CVPixelBuffer;

    if (pixelBuffer != null)
        Delegate.AdjustPixelBuffer (pixelBuffer, null); // TODO: problem in the original sample; no such method exists.
}
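Since AdjustPixelBuffer is missing from the original sample, here is a purely illustrative sketch of what such a delegate method could look like: it inverts BGRA pixels in place, assuming the CV32BGRA reader settings above and a project built with unsafe code enabled. The name and parameters simply mirror the call site:

// Illustrative only, not the sample's code: invert each BGRA pixel in place.
// The second parameter is ignored here, matching the null passed above.
public unsafe void AdjustPixelBuffer (CVPixelBuffer pixelBuffer, CVPixelBuffer outputBuffer)
{
    pixelBuffer.Lock (CVPixelBufferLock.None);

    byte* row = (byte*) pixelBuffer.BaseAddress;
    for (nint y = 0; y < pixelBuffer.Height; y++) {
        byte* px = row;
        for (nint x = 0; x < pixelBuffer.Width; x++, px += 4) {
            px [0] = (byte) (255 - px [0]); // blue
            px [1] = (byte) (255 - px [1]); // green
            px [2] = (byte) (255 - px [2]); // red
            // px [3] is alpha; leave it unchanged.
        }
        row += pixelBuffer.BytesPerRow; // rows may be padded, so step by stride
    }

    pixelBuffer.Unlock (CVPixelBufferLock.None);
}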
private void SetupAssetReaderWriterForAudio (AVAssetTrack audioTrack)
{
    if (audioTrack == null)
        return;

    // Decompress to Linear PCM with the asset reader.
    AVAssetReaderOutput output = AVAssetReaderTrackOutput.Create (audioTrack, (AudioSettings) null);
    _assetReader.AddOutput (output);

    AVAssetWriterInput input = AVAssetWriterInput.Create (audioTrack.MediaType, (AudioSettings) null);
    _assetWriter.AddInput (input);

    // Create and save an instance of AAPLRWSampleBufferChannel,
    // which will coordinate the work of reading and writing sample buffers.
    _audioSampleBufferChannel = new ReadWriteSampleBufferChannel (output, input, false);
}
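Both setup methods tolerate a null track. A sketch of how the tracks might be chosen (an assumption; the track-selection code is not in this excerpt):

// Sketch (assumption): take the first audio and video tracks of the asset,
// if any, and build a sample-buffer channel for each.
AVAssetTrack audioTrack = asset.TracksWithMediaType (AVMediaType.Audio).FirstOrDefault ();
AVAssetTrack videoTrack = asset.TracksWithMediaType (AVMediaType.Video).FirstOrDefault ();
SetupAssetReaderWriterForAudio (audioTrack);
SetupAssetReaderWriterForVideo (videoTrack);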
void ReadingAndWritingDidFinish (bool success, NSError error)
{
    if (!success) {
        assetReader.CancelReading ();
        assetWriter.CancelWriting ();
    }

    // Tear down.
    assetReader.Dispose ();
    assetReader = null;

    assetWriter.Dispose ();
    assetWriter = null;

    audioSampleBufferChannel = null;
    videoSampleBufferChannel = null;
    cancellationTokenSrc = null;

    completionProc (error);
}
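For orientation, a hedged sketch of how the pipeline could reach this teardown once both channels have drained; FinishWritingAsync and the Error properties are real AVAssetReader/AVAssetWriter APIs, but the wiring is an assumption:

// Sketch (assumption): once both channel tasks complete (see the
// Task.WhenAll usage after Start above), finish the writer and report
// the outcome so the teardown above runs exactly once.
async Task FinishAsync (bool channelsSucceeded)
{
    if (channelsSucceeded)
        await assetWriter.FinishWritingAsync ();

    NSError error = assetReader.Error ?? assetWriter.Error;
    ReadingAndWritingDidFinish (channelsSucceeded && error == null, error);
}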
public void DidReadAndWriteSampleBuffer (ReadWriteSampleBufferChannel sampleBufferChannel,
    CMSampleBuffer sampleBuffer, CVPixelBuffer sampleBufferForWrite)
{
    // Calculate progress (on a 0.0 to 1.0 scale) and report it as a percentage.
    double progress = AVReaderWriter.ProgressOfSampleBufferInTimeRange (sampleBuffer, _timeRange);
    _progressProc ((float) progress * 100);

    // Grab the pixel buffer from the sample buffer, if possible.
    CVImageBuffer imageBuffer = sampleBuffer.GetImageBuffer ();
    var pixelBuffer = imageBuffer as CVPixelBuffer;

    if (pixelBuffer != null)
        Delegate.AdjustPixelBuffer (pixelBuffer, sampleBufferForWrite);
}
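AVReaderWriter.ProgressOfSampleBufferInTimeRange is used by both delegate methods but not defined in this excerpt. A plausible sketch based on the buffer's presentation timestamp (an assumption, consistent with the 0.0 to 1.0 scale above):

// Sketch (assumption): the fraction of the time range that has been read
// once this buffer, including its own duration, has been consumed.
static double ProgressOfSampleBufferInTimeRange (CMSampleBuffer sampleBuffer, CMTimeRange timeRange)
{
    CMTime progressTime = sampleBuffer.PresentationTimeStamp;
    progressTime = CMTime.Subtract (progressTime, timeRange.Start);

    CMTime sampleDuration = sampleBuffer.Duration;
    if (sampleDuration.IsNumeric)
        progressTime = CMTime.Add (progressTime, sampleDuration);

    return progressTime.Seconds / timeRange.Duration.Seconds;
}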