Example #1
        void SetupAssetReaderWriterForAudio(AVAssetTrack audioTrack)
        {
            if (audioTrack == null)
            {
                return;
            }

            // Decompress to Linear PCM with the asset reader
            // To read the media data from a specific asset track in the format in which it was stored, pass null to the settings parameter.
            AVAssetReaderOutput output = AVAssetReaderTrackOutput.Create(audioTrack, (AudioSettings)null);

            if (assetReader.CanAddOutput(output))
            {
                assetReader.AddOutput(output);
            }

            AVAssetWriterInput input = AVAssetWriterInput.Create(audioTrack.MediaType, (AudioSettings)null);

            if (assetWriter.CanAddInput(input))
            {
                assetWriter.AddInput(input);
            }

            // Create and save an instance of ReadWriteSampleBufferChannel,
            // which will coordinate the work of reading and writing sample buffers
            audioSampleBufferChannel = new AudioChannel(output, input);
        }
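The snippet relies on assetReader and assetWriter fields created elsewhere. A minimal sketch of that setup, assuming asset and outputUrl values that these examples never show:

            // Minimal sketch (not from the sample): create the reader/writer
            // pair used above. `asset` and `outputUrl` are assumed to exist.
            NSError error;
            assetReader = AVAssetReader.FromAsset(asset, out error);
            if (assetReader == null)
                throw new NSErrorException(error);

            assetWriter = AVAssetWriter.FromUrl(outputUrl, AVFileType.QuickTimeMovie, out error);
            if (assetWriter == null)
                throw new NSErrorException(error);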
Example #2
        Task Start(ReadWriteSampleBufferChannel channel)
        {
            if (channel == null)
            {
                return Task.FromResult<object>(null);
            }
            else
            {
                return channel.StartAsync();
            }
        }
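Because a missing channel yields an already-completed task, a caller can await both channels uniformly. A hypothetical call site using the channel fields from the surrounding snippets:

            // Hypothetical call site: a missing track contributes an
            // already-completed task, so WhenAll needs no special casing.
            await Task.WhenAll(Start(audioSampleBufferChannel), Start(videoSampleBufferChannel));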
Example #3
        void SetupAssetReaderWriterForVideo(AVAssetTrack videoTrack)
        {
            if (videoTrack == null)
            {
                return;
            }

            // Decompress to ARGB with the asset reader
            var decompSettings = new AVVideoSettingsUncompressed {
                PixelFormatType       = CVPixelFormatType.CV32BGRA,
                AllocateWithIOSurface = null
            };
            AVAssetReaderOutput output = new AVAssetReaderTrackOutput(videoTrack, decompSettings);

            assetReader.AddOutput(output);

            // Get the format description of the track, to fill in attributes of the video stream that we don't want to change
            var formatDescription = (CMVideoFormatDescription)videoTrack.FormatDescriptions.FirstOrDefault();
            // Grab track dimensions from format description
            CGSize trackDimensions = formatDescription != null
                                ? formatDescription.GetPresentationDimensions(false, false)
                                : videoTrack.NaturalSize;

            // Grab clean aperture, pixel aspect ratio from format description
            AVVideoCodecSettings compressionSettings = null;

            if (formatDescription != null)
            {
                var cleanApertureDescr    = (NSDictionary)formatDescription.GetExtension(CVImageBuffer.CleanApertureKey);
                var pixelAspectRatioDescr = (NSDictionary)formatDescription.GetExtension(CVImageBuffer.PixelAspectRatioKey);
                compressionSettings = CreateCodecSettingsFor(cleanApertureDescr, pixelAspectRatioDescr);
            }

            // Compress to H.264 with the asset writer
            var videoSettings = new AVVideoSettingsCompressed {
                Codec         = AVVideoCodec.H264,
                Width         = (int)trackDimensions.Width,
                Height        = (int)trackDimensions.Height,
                CodecSettings = compressionSettings
            };
            AVAssetWriterInput input = new AVAssetWriterInput(videoTrack.MediaType, videoSettings);

            input.Transform = videoTrack.PreferredTransform;
            assetWriter.AddInput(input);

            // Create and save an instance of ReadWriteSampleBufferChannel,
            // which will coordinate the work of reading and writing sample buffers
            videoSampleBufferChannel = new VideoChannel(output, input, transformer);
        }
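CreateCodecSettingsFor is referenced but never shown in these examples. A plausible implementation, assuming the Xamarin.iOS settings types accept the raw NSDictionary extensions pulled from the format description:

        // Plausible sketch of the missing helper: forward the source track's
        // clean aperture and pixel aspect ratio to the compressed output.
        AVVideoCodecSettings CreateCodecSettingsFor(NSDictionary cleanAperture, NSDictionary pixelAspectRatio)
        {
            if (cleanAperture == null && pixelAspectRatio == null)
            {
                return null;
            }

            return new AVVideoCodecSettings {
                VideoCleanAperture = cleanAperture != null ? new AVVideoCleanApertureSettings(cleanAperture) : null,
                PixelAspectRatio   = pixelAspectRatio != null ? new AVVideoPixelAspectRatioSettings(pixelAspectRatio) : null
            };
        }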
Example #4
        // TODO: where is this called in the original sample?
        // - (void)cancel:(id)sender

        private Task StartReadingAsync(ReadWriteSampleBufferChannel channel, AVReaderWriter handler)
        {
            var completionSrc = new TaskCompletionSource<object>();

            if (channel == null)
            {
                completionSrc.SetResult(null);
            }
            else
            {
                channel.StartWithAsync(completionSrc, handler);
            }

            return completionSrc.Task;
        }
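The cancel: action noted in the TODO has no counterpart in these snippets. A hypothetical port could simply signal the cancellationTokenSrc field that appears in Example #7 and let the read/write loop observe it:

        // Hypothetical replacement for the original sample's cancel: action.
        // The read/write loop is assumed to watch this token and fall through
        // to the teardown path in ReadingAndWritingDidFinish.
        private void Cancel()
        {
            cancellationTokenSrc.Cancel();
        }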
Example #5
        public void DidReadSampleBuffer(ReadWriteSampleBufferChannel sampleBufferChannel, CMSampleBuffer sampleBuffer)
        {
            // Calculate progress (scale of 0.0 to 1.0)
            double progress = AVReaderWriter.ProgressOfSampleBufferInTimeRange(sampleBuffer, _timeRange);

            _progressProc((float)progress * 100);

            // Grab the pixel buffer from the sample buffer, if possible
            CVImageBuffer imageBuffer = sampleBuffer.GetImageBuffer();

            var pixelBuffer = imageBuffer as CVPixelBuffer;

            if (pixelBuffer != null)
            {
                Delegate.AdjustPixelBuffer(pixelBuffer, null); // TODO: problem in the original sample; no such method exists there
            }
        }
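ProgressOfSampleBufferInTimeRange is referenced but not shown. A plausible implementation, inferred from the timestamp arithmetic the call implies: the buffer's presentation time, offset into the time range and scaled by the range's duration:

        // Plausible sketch of the progress helper used above
        public static double ProgressOfSampleBufferInTimeRange(CMSampleBuffer sampleBuffer, CMTimeRange timeRange)
        {
            // Offset of this buffer's presentation time into the overall range
            CMTime progressTime = CMTime.Subtract(sampleBuffer.PresentationTimeStamp, timeRange.Start);

            // Count the buffer as done once its full duration has been consumed
            CMTime duration = sampleBuffer.Duration;
            if (duration.IsNumeric)
            {
                progressTime = CMTime.Add(progressTime, duration);
            }

            return progressTime.Seconds / timeRange.Duration.Seconds;
        }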
Example #6
        private void SetupAssetReaderWriterForAudio(AVAssetTrack audioTrack)
        {
            if (audioTrack == null)
            {
                return;
            }

            // Decompress to Linear PCM with the asset reader
            AVAssetReaderOutput output = AVAssetReaderTrackOutput.Create(audioTrack, (AudioSettings)null);

            _assetReader.AddOutput(output);

            AVAssetWriterInput input = AVAssetWriterInput.Create(audioTrack.MediaType, (AudioSettings)null);

            _assetWriter.AddInput(input);

            // Create and save an instance of AAPLRWSampleBufferChannel,
            // which will coordinate the work of reading and writing sample buffers
            _audioSampleBufferChannel = new ReadWriteSampleBufferChannel(output, input, false);
        }
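A hypothetical caller, assuming an _asset field to match this snippet's naming: TracksWithMediaType returns an empty array when the asset has no audio, and the null guard above absorbs that case.

            // Hypothetical call site; `_asset` is an assumed field
            AVAssetTrack audioTrack = _asset.TracksWithMediaType(AVMediaType.Audio).FirstOrDefault();
            SetupAssetReaderWriterForAudio(audioTrack);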
Example #7
        void ReadingAndWritingDidFinish(bool success, NSError error)
        {
            if (!success)
            {
                assetReader.CancelReading();
                assetWriter.CancelWriting();
            }

            // Tear down
            assetReader.Dispose();
            assetReader = null;

            assetWriter.Dispose();
            assetWriter = null;

            audioSampleBufferChannel = null;
            videoSampleBufferChannel = null;
            cancellationTokenSrc     = null;

            completionProc(error);
        }
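completionProc is the user-supplied callback that ends the whole operation; a null error presumably signals success. A minimal consumer sketch:

        // Hypothetical callback handed in as completionProc
        void OnReadingAndWritingCompleted(NSError error)
        {
            if (error != null)
                Console.WriteLine("Transfer failed: {0}", error.LocalizedDescription);
            else
                Console.WriteLine("Transfer finished");
        }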
Example #8
        public void DidReadAndWriteSampleBuffer(ReadWriteSampleBufferChannel sampleBufferChannel,
                                                CMSampleBuffer sampleBuffer,
                                                CVPixelBuffer sampleBufferForWrite)
        {
            // Calculate progress (scale of 0.0 to 1.0)
            double progress = AVReaderWriter.ProgressOfSampleBufferInTimeRange(sampleBuffer, _timeRange);

            _progressProc((float)progress * 100);

            // Grab the pixel buffer from the sample buffer, if possible
            CVImageBuffer imageBuffer = sampleBuffer.GetImageBuffer();

            var pixelBuffer = imageBuffer as CVPixelBuffer;

            if (pixelBuffer != null)
            {
                Delegate.AdjustPixelBuffer(pixelBuffer, sampleBufferForWrite);
            }
        }
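AdjustPixelBuffer is the delegate hook where the per-frame image processing happens. A minimal sketch of one, assuming the 32-bit BGRA buffers configured in Example #3, a non-null output buffer, and a project that allows unsafe code; it dims the frame by halving every channel:

        // Hypothetical AdjustPixelBuffer: halve every byte of the BGRA input
        // and write the dimmed frame into the output buffer.
        public unsafe void AdjustPixelBuffer(CVPixelBuffer input, CVPixelBuffer output)
        {
            input.Lock(CVPixelBufferLock.ReadOnly);
            output.Lock(CVPixelBufferLock.None);

            byte* src = (byte*)input.BaseAddress;
            byte* dst = (byte*)output.BaseAddress;

            // Rows may be padded, so advance each buffer by its own stride
            nint copyWidth = input.BytesPerRow < output.BytesPerRow ? input.BytesPerRow : output.BytesPerRow;

            for (nint row = 0; row < input.Height; row++)
            {
                for (nint col = 0; col < copyWidth; col++)
                    dst[col] = (byte)(src[col] / 2);

                src += input.BytesPerRow;
                dst += output.BytesPerRow;
            }

            output.Unlock(CVPixelBufferLock.None);
            input.Unlock(CVPixelBufferLock.ReadOnly);
        }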