Example No. 1
        /// <summary>
        /// Downloads the frame from the hardware into a software frame if possible.
        /// The input hardware frame gets freed and the return value will point to the new software frame.
        /// </summary>
        /// <param name="codecContext">The codec context.</param>
        /// <param name="input">The input frame coming from the decoder (may or may not be hardware).</param>
        /// <param name="isHardwareFrame">if set to <c>true</c> [comes from hardware] otherwise, hardware decoding was not performed.</param>
        /// <returns>
        /// The frame downloaded from the device into RAM.
        /// </returns>
        /// <exception cref="Exception">Failed to transfer data to output frame.</exception>
        public AVFrame *ExchangeFrame(AVCodecContext *codecContext, AVFrame *input, out bool isHardwareFrame)
        {
            isHardwareFrame = false;

            if (codecContext->hw_device_ctx == null)
            {
                return(input);
            }

            isHardwareFrame = true;

            if (input->format != (int)PixelFormat)
            {
                return(input);
            }

            var output = MediaFrame.CreateAVFrame();

            var result = ffmpeg.av_hwframe_transfer_data(output, input, 0);

            ffmpeg.av_frame_copy_props(output, input);
            if (result < 0)
            {
                MediaFrame.ReleaseAVFrame(output);
                throw new MediaContainerException("Failed to transfer data to output frame");
            }

            MediaFrame.ReleaseAVFrame(input);

            return(output);
        }
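
A minimal call-site sketch for ExchangeFrame, assuming a HardwareAccelerator instance named accelerator and the same FFmpeg.AutoGen bindings used above; the real call site in this codebase appears in Example No. 6 below.

        // Receive a frame from the decoder, then swap any hardware frame for a software copy.
        AVFrame *decodedFrame = MediaFrame.CreateAVFrame();
        if (ffmpeg.avcodec_receive_frame(codecContext, decodedFrame) == 0)
        {
            // When isHardwareFrame comes back true, the GPU-side frame has already been freed.
            decodedFrame = accelerator.ExchangeFrame(codecContext, decodedFrame, out var isHardwareFrame);
        }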
Example No. 2
        /// <inheritdoc />
        protected override MediaFrame CreateFrameSource(IntPtr framePointer)
        {
            // Validate the audio frame
            var frame = (AVFrame *)framePointer;

            if (framePointer == IntPtr.Zero || frame->channels <= 0 || frame->nb_samples <= 0 || frame->sample_rate <= 0)
            {
                return(null);
            }

            // Init the filter graph for the frame
            InitializeFilterGraph(frame);

            AVFrame *outputFrame;

            // The filter graph can be changed by issuing a ChangeMedia command
            if (FilterGraph != null)
            {
                // Allocate the output frame
                outputFrame = MediaFrame.CloneAVFrame(frame);

                var result = ffmpeg.av_buffersrc_add_frame(SourceFilter, outputFrame);
                while (result >= 0)
                {
                    result = ffmpeg.av_buffersink_get_frame_flags(SinkFilter, outputFrame, 0);
                }

                if (outputFrame->nb_samples <= 0)
                {
                    // If we don't have a valid output frame simply release it and
                    // return the original input frame
                    MediaFrame.ReleaseAVFrame(outputFrame);
                    outputFrame = frame;
                }
                else
                {
                    // The output frame is now the valid frame;
                    // therefore, we need to release the original.
                    MediaFrame.ReleaseAVFrame(frame);
                }
            }
            else
            {
                outputFrame = frame;
            }

            // Check if the output frame is valid
            if (outputFrame->nb_samples <= 0)
            {
                return(null);
            }

            var frameHolder = new AudioFrame(outputFrame, this);

            return(frameHolder);
        }
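
The push/pull pair above drains the sink until it returns a negative code, leaving outputFrame holding the last frame the graph produced. A hedged sketch of how that return code is typically interpreted (the AVERROR helpers come from FFmpeg.AutoGen; this check is not part of the original snippet):

        var pullResult = ffmpeg.av_buffersink_get_frame_flags(SinkFilter, outputFrame, 0);
        if (pullResult == ffmpeg.AVERROR(ffmpeg.EAGAIN))
        {
            // Not an error: the graph simply needs more input before it can emit another frame.
        }
        else if (pullResult < 0)
        {
            // A hard failure; the nb_samples check above catches this case indirectly.
        }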
Example No. 3
        /// <summary>
        /// Adds a block to the playback blocks by converting the given frame.
        /// If there are no more blocks in the pool, the oldest block is returned to the pool
        /// and reused for the new block. The source frame is automatically disposed.
        /// </summary>
        /// <param name="source">The source.</param>
        /// <param name="container">The container.</param>
        /// <returns>The filled block.</returns>
        internal MediaBlock Add(MediaFrame source, MediaContainer container)
        {
            if (source == null)
            {
                return(null);
            }

            lock (SyncLock)
            {
                try
                {
                    // Check if we already have a block at the given time
                    if (IsInRange(source.StartTime) && source.HasValidStartTime)
                    {
                        var repeatedBlock = PlaybackBlocks.FirstOrDefault(f => f.StartTime.Ticks == source.StartTime.Ticks);
                        if (repeatedBlock != null)
                        {
                            PlaybackBlocks.Remove(repeatedBlock);
                            PoolBlocks.Enqueue(repeatedBlock);
                        }
                    }

                    // if there are no available blocks, make room!
                    if (PoolBlocks.Count <= 0)
                    {
                        // Remove the first block from playback
                        var firstBlock = PlaybackBlocks[0];
                        PlaybackBlocks.RemoveAt(0);
                        PoolBlocks.Enqueue(firstBlock);
                    }

                    // Get a block reference from the pool and convert it!
                    var targetBlock = PoolBlocks.Dequeue();
                    var lastBlock   = PlaybackBlocks.Count > 0 ? PlaybackBlocks[PlaybackBlocks.Count - 1] : null;

                    if (container.Convert(source, ref targetBlock, true, lastBlock) == false)
                    {
                        // Conversion failed; return the block to the pool
                        PoolBlocks.Enqueue(targetBlock);
                        return(null);
                    }

                    // Add the target block to the playback blocks
                    PlaybackBlocks.Add(targetBlock);

                    // return the new target block
                    return(targetBlock);
                }
                finally
                {
                    // update collection-wide properties
                    UpdateCollectionProperties();
                }
            }
        }
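
A hypothetical usage sketch for Add (the decodedFrames, blockBuffer, and container names are assumptions, not part of the snippet). Per the summary above, the source frame is disposed automatically, so a null return only means the frame produced no playable block.

        foreach (var decodedFrame in decodedFrames)
        {
            var block = blockBuffer.Add(decodedFrame, container);
            if (block == null)
            {
                continue; // Conversion failed; the source frame has already been disposed.
            }

            // The block now lives in PlaybackBlocks until it is recycled back into the pool.
        }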
Example No. 4
        /// <inheritdoc />
        public override bool MaterializeFrame(MediaFrame input, ref MediaBlock output, MediaBlock previousBlock)
        {
            if (output == null)
            {
                output = new SubtitleBlock();
            }
            if (input is SubtitleFrame == false || output is SubtitleBlock == false)
            {
                throw new ArgumentException($"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");
            }

            var source = (SubtitleFrame)input;
            var target = (SubtitleBlock)output;

            // Set the target data
            target.EndTime     = source.EndTime;
            target.StartTime   = source.StartTime;
            target.Duration    = source.Duration;
            target.StreamIndex = input.StreamIndex;

            // Process time offsets
            if (Delay != TimeSpan.Zero)
            {
                target.StartTime = TimeSpan.FromTicks(target.StartTime.Ticks + Delay.Ticks);
                target.EndTime   = TimeSpan.FromTicks(target.EndTime.Ticks + Delay.Ticks);
                target.Duration  = TimeSpan.FromTicks(target.EndTime.Ticks - target.StartTime.Ticks);
            }

            target.OriginalText.Clear();
            foreach (var t in source.Text)
            {
                target.OriginalText.Add(t);
            }

            target.OriginalTextType = source.TextType;

            target.Text.Clear();
            foreach (var text in source.Text)
            {
                if (string.IsNullOrWhiteSpace(text))
                {
                    continue;
                }

                if (source.TextType == AVSubtitleType.SUBTITLE_ASS)
                {
                    var strippedText = StripAssFormat(text);
                    if (string.IsNullOrWhiteSpace(strippedText) == false)
                    {
                        target.Text.Add(strippedText);
                    }
                }
                else
                {
                    var strippedText = StripSrtFormat(text);
                    if (string.IsNullOrWhiteSpace(strippedText) == false)
                    {
                        target.Text.Add(strippedText);
                    }
                }
            }

            // TODO: CompressedSize is just an estimate.
            // It would be better if we counted chars in all text lines.
            target.CompressedSize = source.CompressedSize;

            return(true);
        }
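
A minimal sketch of the ref-parameter pattern shared by these MaterializeFrame overrides (component, frame, and previous are hypothetical names): passing a null block lets the method allocate one, and the same instance can be reused across calls to avoid reallocations.

        MediaBlock block = null;
        if (component.MaterializeFrame(frame, ref block, previous))
        {
            var subtitle = (SubtitleBlock)block;
            // subtitle.Text holds the stripped lines; subtitle.OriginalText keeps the raw payload.
        }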
Example No. 5
        /// <inheritdoc />
        public override bool MaterializeFrame(MediaFrame input, ref MediaBlock output, MediaBlock previousBlock)
        {
            if (output == null)
            {
                output = new AudioBlock();
            }
            if (input is AudioFrame == false || output is AudioBlock == false)
            {
                throw new ArgumentException($"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");
            }

            var source = (AudioFrame)input;
            var target = (AudioBlock)output;

            // Create the source and target audio specs. We might need to scale from
            // the source to the target
            var sourceSpec = FFAudioParams.CreateSource(source.Pointer);
            var targetSpec = FFAudioParams.CreateTarget(source.Pointer);

            // Initialize or update the audio scaler if required
            if (Scaler == null || LastSourceSpec == null || FFAudioParams.AreCompatible(LastSourceSpec, sourceSpec) == false)
            {
                Scaler = ffmpeg.swr_alloc_set_opts(
                    Scaler,
                    targetSpec.ChannelLayout,
                    targetSpec.Format,
                    targetSpec.SampleRate,
                    sourceSpec.ChannelLayout,
                    sourceSpec.Format,
                    sourceSpec.SampleRate,
                    0,
                    null);

                RC.Current.Add(Scaler);
                ffmpeg.swr_init(Scaler);
                LastSourceSpec = sourceSpec;
            }

            // Allocate the unmanaged output buffer and convert to stereo.
            int outputSamplesPerChannel;

            if (target.Allocate(targetSpec.BufferLength) &&
                target.TryAcquireWriterLock(out var writeLock))
            {
                using (writeLock)
                {
                    var outputBufferPtr = (byte *)target.Buffer;

                    // Execute the conversion (audio scaling). It will return the number of samples that were output
                    outputSamplesPerChannel = ffmpeg.swr_convert(
                        Scaler,
                        &outputBufferPtr,
                        targetSpec.SamplesPerChannel,
                        source.Pointer->extended_data,
                        source.Pointer->nb_samples);
                }
            }
            else
            {
                return(false);
            }

            // Compute the buffer length
            var outputBufferLength =
                ffmpeg.av_samples_get_buffer_size(null, targetSpec.ChannelCount, outputSamplesPerChannel, targetSpec.Format, 1);

            // Flag the block if we have to
            target.IsStartTimeGuessed = source.HasValidStartTime == false;

            // Try to fix the start time, duration and End time if we don't have valid data
            if (source.HasValidStartTime == false && previousBlock != null)
            {
                // Get timing information from the previous block
                target.StartTime = TimeSpan.FromTicks(previousBlock.EndTime.Ticks + 1);
                target.Duration  = source.Duration.Ticks > 0 ? source.Duration : previousBlock.Duration;
                target.EndTime   = TimeSpan.FromTicks(target.StartTime.Ticks + target.Duration.Ticks);
            }
            else
            {
                // We set the target properties directly from the source
                target.StartTime = source.StartTime;
                target.Duration  = source.Duration;
                target.EndTime   = source.EndTime;
            }

            target.CompressedSize      = source.CompressedSize;
            target.SamplesBufferLength = outputBufferLength;
            target.ChannelCount        = targetSpec.ChannelCount;

            target.SampleRate        = targetSpec.SampleRate;
            target.SamplesPerChannel = outputSamplesPerChannel;
            target.StreamIndex       = input.StreamIndex;

            return(true);
        }
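
As a sanity check on the buffer-length math above: for packed 16-bit samples, av_samples_get_buffer_size with an alignment of 1 reduces to samples × channels × 2 bytes. A sketch assuming the target format is AVSampleFormat.AV_SAMPLE_FMT_S16 (the actual targetSpec.Format is not shown in this snippet) and System.Diagnostics for Debug.Assert:

        var expectedBytes = outputSamplesPerChannel * targetSpec.ChannelCount * sizeof(short);
        var reportedBytes = ffmpeg.av_samples_get_buffer_size(
            null, targetSpec.ChannelCount, outputSamplesPerChannel, AVSampleFormat.AV_SAMPLE_FMT_S16, 1);
        Debug.Assert(expectedBytes == reportedBytes);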
Example No. 6
        /// <inheritdoc />
        protected override MediaFrame CreateFrameSource(IntPtr framePointer)
        {
            // Validate the video frame
            var frame = (AVFrame *)framePointer;

            if (framePointer == IntPtr.Zero || frame->width <= 0 || frame->height <= 0)
            {
                return(null);
            }

            // Move the frame from hardware (GPU) memory to RAM (CPU)
            if (HardwareAccelerator != null)
            {
                frame = HardwareAccelerator.ExchangeFrame(CodecContext, frame, out var isHardwareFrame);
                IsUsingHardwareDecoding = isHardwareFrame;
            }

            // Init the filter graph for the frame
            InitializeFilterGraph(frame);

            AVFrame *outputFrame;

            // Changes in the filter graph can be applied by calling the ChangeMedia command
            if (FilterGraph != null)
            {
                // Allocate the output frame
                outputFrame = MediaFrame.CloneAVFrame(frame);

                var result = ffmpeg.av_buffersrc_add_frame(SourceFilter, outputFrame);
                while (result >= 0)
                {
                    result = ffmpeg.av_buffersink_get_frame_flags(SinkFilter, outputFrame, 0);
                }

                if (outputFrame->width <= 0 || outputFrame->height <= 0)
                {
                    // If we don't have a valid output frame simply release it and
                    // return the original input frame
                    MediaFrame.ReleaseAVFrame(outputFrame);
                    outputFrame = frame;
                }
                else
                {
                    // The output frame is now the valid frame;
                    // therefore, we need to release the original.
                    MediaFrame.ReleaseAVFrame(frame);
                }
            }
            else
            {
                outputFrame = frame;
            }

            // Check if the output frame is valid
            if (outputFrame->width <= 0 || outputFrame->height <= 0)
            {
                return(null);
            }

            // Create the frame holder object and return it.
            return(new VideoFrame(outputFrame, this));
        }
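
The width/height validity rule appears twice in this method; a hypothetical helper that factors it out (not present in the original code) would let both guards read the same way:

        private static bool IsValidPicture(AVFrame *frame) =>
            frame != null && frame->width > 0 && frame->height > 0;

With such a helper, both checks collapse to if (IsValidPicture(outputFrame) == false) return null;, matching the comparison style used elsewhere in these snippets.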
Example No. 7
        /// <inheritdoc />
        public override bool MaterializeFrame(MediaFrame input, ref MediaBlock output, MediaBlock previousBlock)
        {
            if (output == null)
            {
                output = new VideoBlock();
            }
            if (input is VideoFrame == false || output is VideoBlock == false)
            {
                throw new ArgumentException($"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");
            }

            var source = (VideoFrame)input;
            var target = (VideoBlock)output;

            // Retrieve a suitable scaler or create it on the fly
            var newScaler = ffmpeg.sws_getCachedContext(
                Scaler,
                source.Pointer->width,
                source.Pointer->height,
                NormalizePixelFormat(source.Pointer),
                source.Pointer->width,
                source.Pointer->height,
                Constants.VideoPixelFormat,
                ScalerFlags,
                null,
                null,
                null);

            // if it's the first time we set the scaler, simply assign it.
            if (Scaler == null)
            {
                Scaler = newScaler;
                RC.Current.Add(Scaler);
            }

            // Reassign to the new scaler and remove the reference to the existing one
            // The get cached context function automatically frees the existing scaler.
            if (Scaler != newScaler)
            {
                RC.Current.Remove(Scaler);
                Scaler = newScaler;
            }

            // Perform scaling and save the data to our unmanaged buffer pointer
            if (target.Allocate(source, Constants.VideoPixelFormat) &&
                target.TryAcquireWriterLock(out var writeLock))
            {
                using (writeLock)
                {
                    var targetStride = new[] { target.PictureBufferStride };
                    var targetScan   = default(byte_ptrArray8);
                    targetScan[0] = (byte *)target.Buffer;

                    // The scaling is done here
                    var outputHeight = ffmpeg.sws_scale(
                        Scaler,
                        source.Pointer->data,
                        source.Pointer->linesize,
                        0,
                        source.Pointer->height,
                        targetScan,
                        targetStride);

                    if (outputHeight <= 0)
                    {
                        return(false);
                    }
                }
            }
            else
            {
                return(false);
            }

            // After scaling, we need to copy and guess some of the block properties
            // Flag the block if we have to
            target.IsStartTimeGuessed = source.HasValidStartTime == false;

            // Try to fix the start time, duration and End time if we don't have valid data
            if (source.HasValidStartTime == false && previousBlock != null)
            {
                // Get timing information from the previous block
                target.StartTime = TimeSpan.FromTicks(previousBlock.EndTime.Ticks + 1);
                target.Duration  = source.Duration.Ticks > 0 ? source.Duration : previousBlock.Duration;
                target.EndTime   = TimeSpan.FromTicks(target.StartTime.Ticks + target.Duration.Ticks);

                // Guess picture number and SMPTE time code
                var frameRate = ffmpeg.av_guess_frame_rate(Container.InputContext, Stream, source.Pointer);
                target.DisplayPictureNumber = Utilities.ComputePictureNumber(StartTime, target.StartTime, frameRate);
                target.SmtpeTimeCode        = Utilities.ComputeSmtpeTimeCode(target.DisplayPictureNumber, frameRate);
            }
            else
            {
                // We set the target properties directly from the source
                target.StartTime = source.StartTime;
                target.Duration  = source.Duration;
                target.EndTime   = source.EndTime;

                // Copy picture number and SMPTE time code
                target.DisplayPictureNumber = source.DisplayPictureNumber;
                target.SmtpeTimeCode        = source.SmtpeTimeCode;
            }

            // Fill out other properties
            target.IsHardwareFrame         = source.IsHardwareFrame;
            target.HardwareAcceleratorName = source.HardwareAcceleratorName;
            target.CompressedSize          = source.CompressedSize;
            target.CodedPictureNumber      = source.CodedPictureNumber;
            target.StreamIndex             = source.StreamIndex;
            target.ClosedCaptions          = source.ClosedCaptions.ToList();

            // Update the stream info object if we get Closed Caption Data
            if (StreamInfo.HasClosedCaptions == false && target.ClosedCaptions.Count > 0)
            {
                StreamInfo.HasClosedCaptions = true;
            }

            // Process the aspect ratio
            var aspectRatio = ffmpeg.av_guess_sample_aspect_ratio(Container.InputContext, Stream, source.Pointer);

            if (aspectRatio.num == 0 || aspectRatio.den == 0)
            {
                target.PixelAspectWidth  = 1;
                target.PixelAspectHeight = 1;
            }
            else
            {
                target.PixelAspectWidth  = aspectRatio.num;
                target.PixelAspectHeight = aspectRatio.den;
            }

            return(true);
        }
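
A follow-up sketch on the aspect-ratio handling above: a renderer can derive the display (stretched) width from the stored pixel aspect ratio. PixelWidth and PixelHeight are assumed properties of VideoBlock here; they do not appear in this snippet.

        var displayWidth  = target.PixelWidth * target.PixelAspectWidth / (double)target.PixelAspectHeight;
        var displayHeight = (double)target.PixelHeight;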