// ---- Example #1 ----
        /// <summary>
        /// Creates an asset reader for the first video track of <paramref name="asset"/>
        /// and pumps its sample buffers into the decompression session until the reader
        /// stops reading. When the asset completes, restarts itself to loop playback.
        /// </summary>
        /// <param name="asset">Asset whose first video track is decoded.</param>
        void ReadSampleBuffers(AVAsset asset)
        {
            NSError error;

            assetReader = AVAssetReader.FromAsset(asset, out error);

            if (error != null)
            {
                Console.WriteLine("Error creating Asset Reader: {0}", error.Description);
                // Fix: after a creation error the reader is unusable (likely null);
                // continuing would dereference it below.
                return;
            }

            AVAssetTrack[] videoTracks = asset.TracksWithMediaType(AVMediaType.Video);
            if (videoTracks.Length == 0)
            {
                // Fix: indexing videoTracks[0] on an asset without video would throw.
                Console.WriteLine("Asset contains no video track.");
                return;
            }

            AVAssetTrack videoTrack = videoTracks [0];

            CreateDecompressionSession(videoTrack);
            // Null settings => vend samples in their stored (compressed) format.
            var videoTrackOutput = AVAssetReaderTrackOutput.Create(videoTrack, (AVVideoSettingsUncompressed)null);

            if (assetReader.CanAddOutput(videoTrackOutput))
            {
                assetReader.AddOutput(videoTrackOutput);
            }

            if (!assetReader.StartReading())
            {
                return;
            }

            while (assetReader.Status == AVAssetReaderStatus.Reading)
            {
                CMSampleBuffer sampleBuffer = videoTrackOutput.CopyNextSampleBuffer();
                if (sampleBuffer != null)
                {
                    VTDecodeFrameFlags flags = VTDecodeFrameFlags.EnableAsynchronousDecompression;
                    VTDecodeInfoFlags  flagOut;
                    decompressionSession.DecodeFrame(sampleBuffer, flags, IntPtr.Zero, out flagOut);

                    sampleBuffer.Dispose();
                    // Throttle: once enough decoded frames are queued, block until the
                    // consumer drains one (bufferSemaphore is released elsewhere).
                    if (presentationTimes.Count >= 5)
                    {
                        bufferSemaphore.Wait();
                    }
                }
                else if (assetReader.Status == AVAssetReaderStatus.Failed)
                {
                    Console.WriteLine("Asset Reader failed with error: {0}", assetReader.Error.Description);
                }
                else if (assetReader.Status == AVAssetReaderStatus.Completed)
                {
                    Console.WriteLine("Reached the end of the video.");
                    ChangeStatus();
                    // Loop playback: start over with a fresh reader on the same asset.
                    ReadSampleBuffers(asset);
                }
            }
        }
        /// <summary>
        /// Starts the asset reader and writer, opens a sample-writing session at the
        /// start of the time range, and launches the audio and video channel transfers.
        /// When both channels finish, finalizes the write (or cancels on request).
        /// </summary>
        /// <exception cref="NSErrorException">Reader or writer refused to start.</exception>
        private void StartReadingAndWriting()
        {
            // Instruct the asset reader and asset writer to get ready to do work
            if (!_assetReader.StartReading())
            {
                throw new NSErrorException(_assetReader.Error);
            }

            if (!_assetWriter.StartWriting())
            {
                throw new NSErrorException(_assetWriter.Error);
            }

            // Start a sample-writing session
            _assetWriter.StartSessionAtSourceTime(_timeRange.Start);

            // Only set audio handler(obj-c delegate) for audio-only assets, else let the video channel drive progress
            AVReaderWriter audioHandler = _videoSampleBufferChannel == null ? this : null;
            var            audioTask    = StartReadingAsync(_audioSampleBufferChannel, audioHandler);
            var            videoTask    = StartReadingAsync(_videoSampleBufferChannel, this);

            // Set up a callback for when the sample writing is finished
            Task.WhenAll(audioTask, videoTask).ContinueWith(_ => {
                if (_cancellationTokenSrc.Token.IsCancellationRequested)
                {
                    _assetReader.CancelReading();
                    _assetWriter.CancelWriting();
                    throw new OperationCanceledException();
                }

                if (_assetReader.Status != AVAssetReaderStatus.Failed)
                {
                    _assetWriter.FinishWriting(() => {
                        bool success = _assetWriter.Status == AVAssetWriterStatus.Completed;
                        ReadingAndWritingDidFinish(success, _assetWriter.Error);
                    });
                }
                else
                {
                    // Fix: a failed reader previously produced no completion callback,
                    // so callers never learned the transfer died. Report the failure.
                    ReadingAndWritingDidFinish(false, _assetReader.Error);
                }
            }, _cancellationTokenSrc.Token);
        }
// ---- Example #3 ----
        /// <summary>
        /// Starts the asset reader and writer, opens a sample-writing session at
        /// <paramref name="timeRange"/>.Start, and launches the audio and video
        /// channel transfers. When both finish, finalizes the write (or cancels).
        /// </summary>
        /// <param name="timeRange">Source time range; writing starts at its Start.</param>
        /// <exception cref="NSErrorException">Reader or writer refused to start.</exception>
        void StartReadingAndWriting(CMTimeRange timeRange)
        {
            // Instruct the asset reader and asset writer to get ready to do work
            if (!assetReader.StartReading())
            {
                throw new NSErrorException(assetReader.Error);
            }

            if (!assetWriter.StartWriting())
            {
                throw new NSErrorException(assetWriter.Error);
            }

            // Start a sample-writing session
            assetWriter.StartSessionAtSourceTime(timeRange.Start);

            Task audioTask = Start(audioSampleBufferChannel);
            Task videoTask = Start(videoSampleBufferChannel);

            // Set up a callback for when the sample writing is finished
            Task.WhenAll(audioTask, videoTask).ContinueWith(_ => {
                if (cancellationTokenSrc.Token.IsCancellationRequested)
                {
                    assetReader.CancelReading();
                    assetWriter.CancelWriting();
                    throw new OperationCanceledException();
                }

                if (assetReader.Status != AVAssetReaderStatus.Failed)
                {
                    assetWriter.FinishWriting(() => {
                        bool success = assetWriter.Status == AVAssetWriterStatus.Completed;
                        ReadingAndWritingDidFinish(success, assetWriter.Error);
                    });
                }
                else
                {
                    // Fix: a failed reader previously produced no completion callback,
                    // so callers never learned the transfer died. Report the failure.
                    ReadingAndWritingDidFinish(false, assetReader.Error);
                }
            }, cancellationTokenSrc.Token);
        }
        /// <summary>
        /// Creates an asset reader for the first video track of <paramref name="asset"/>
        /// and feeds its sample buffers to the decompression session until the reader
        /// stops reading. Restarts itself when the asset completes (loop playback).
        /// </summary>
        /// <param name="asset">Asset whose first video track is decoded.</param>
        void ReadSampleBuffers(AVAsset asset)
        {
            NSError error;
            assetReader = AVAssetReader.FromAsset (asset, out error);

            if (error != null) {
                Console.WriteLine ("Error creating Asset Reader: {0}", error.Description);
                // Fix: the reader is unusable (likely null) after a creation error;
                // continuing would dereference it below.
                return;
            }

            AVAssetTrack[] videoTracks = asset.TracksWithMediaType (AVMediaType.Video);
            if (videoTracks.Length == 0) {
                // Fix: indexing videoTracks[0] on an asset without video would throw.
                Console.WriteLine ("Asset contains no video track.");
                return;
            }

            AVAssetTrack videoTrack = videoTracks [0];
            CreateDecompressionSession (videoTrack);
            // Null settings => vend samples in their stored (compressed) format.
            var videoTrackOutput = AVAssetReaderTrackOutput.Create (videoTrack, (AVVideoSettingsUncompressed)null);

            if (assetReader.CanAddOutput (videoTrackOutput))
                assetReader.AddOutput (videoTrackOutput);

            if (!assetReader.StartReading ())
                return;

            while (assetReader.Status == AVAssetReaderStatus.Reading) {
                CMSampleBuffer sampleBuffer = videoTrackOutput.CopyNextSampleBuffer ();
                if (sampleBuffer != null) {
                    VTDecodeFrameFlags flags = VTDecodeFrameFlags.EnableAsynchronousDecompression;
                    VTDecodeInfoFlags flagOut;
                    decompressionSession.DecodeFrame (sampleBuffer, flags, IntPtr.Zero, out flagOut);

                    sampleBuffer.Dispose ();
                    // Throttle: once enough decoded frames are queued, block until the
                    // consumer drains one (bufferSemaphore is released elsewhere).
                    if (presentationTimes.Count >= 5)
                        bufferSemaphore.Wait ();

                } else if (assetReader.Status == AVAssetReaderStatus.Failed) {
                    Console.WriteLine ("Asset Reader failed with error: {0}", assetReader.Error.Description);
                } else if (assetReader.Status == AVAssetReaderStatus.Completed) {
                    Console.WriteLine("Reached the end of the video.");
                    ChangeStatus ();
                    // Loop playback: start over with a fresh reader on the same asset.
                    ReadSampleBuffers (asset);
                }
            }
        }