/// <summary>
        /// Converts decoded, raw frame data in the frame source into a usable frame. <br />
        /// The process includes performing picture, sample, or text conversions
        /// so that the decoded source frame data is easily usable in multimedia applications.
        /// </summary>
        /// <param name="input">The source frame to use as an input.</param>
        /// <param name="output">The target frame that will be updated with the source frame. If null is passed the frame will be instantiated.</param>
        /// <returns>
        /// Returns the updated output frame.
        /// </returns>
        /// <exception cref="System.ArgumentNullException">input</exception>
        internal override MediaBlock MaterializeFrame(MediaFrame input, ref MediaBlock output)
        {
            if (output == null)
            {
                output = new SubtitleBlock();
            }
            var source = input as SubtitleFrame;
            var target = output as SubtitleBlock;

            if (source == null || target == null)
            {
                throw new ArgumentNullException($"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");
            }

            // Set the target data
            target.EndTime   = source.EndTime;
            target.StartTime = source.StartTime;
            target.Duration  = source.Duration;
            target.Text.Clear();
            target.Text.AddRange(source.Text);

            return target;
        }
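
A minimal caller sketch for the contract above: passing a null block lets MaterializeFrame instantiate it, and later calls reuse the same instance. The component and decodedFrame names are hypothetical stand-ins, not part of the snippet.

        // Hypothetical caller (sketch): component and decodedFrame are assumed
        // to exist in the surrounding container code.
        MediaBlock block = null;
        var materialized = component.MaterializeFrame(decodedFrame, ref block);
        // block and materialized now reference the same SubtitleBlock; passing
        // the non-null block on later calls reuses it instead of reallocating.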
Example #2
        /// <summary>
        /// Converts decoded, raw frame data in the frame source into a usable frame. <br />
        /// The process includes performing picture, sample, or text conversions
        /// so that the decoded source frame data is easily usable in multimedia applications.
        /// </summary>
        /// <param name="input">The source frame to use as an input.</param>
        /// <param name="output">The target frame that will be updated with the source frame. If null is passed the frame will be instantiated.</param>
        /// <param name="siblings">The sibling blocks that may help guess some additional parameters for the input frame.</param>
        /// <returns>
        /// Returns the updated output frame.
        /// </returns>
        /// <exception cref="System.ArgumentNullException">input</exception>
        public override MediaBlock MaterializeFrame(MediaFrame input, ref MediaBlock output, List <MediaBlock> siblings)
        {
            if (output == null)
            {
                output = new AudioBlock();
            }
            var source = input as AudioFrame;
            var target = output as AudioBlock;

            if (source == null || target == null)
            {
                throw new ArgumentNullException($"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");
            }

            // Create the source and target audio specs. We might need to scale from
            // the source to the target.
            var sourceSpec = AudioParams.CreateSource(source.Pointer);
            var targetSpec = AudioParams.CreateTarget(source.Pointer);

            // Initialize or update the audio scaler if required
            if (Scaler == null || LastSourceSpec == null || AudioParams.AreCompatible(LastSourceSpec, sourceSpec) == false)
            {
                Scaler = ffmpeg.swr_alloc_set_opts(
                    Scaler,
                    targetSpec.ChannelLayout,
                    targetSpec.Format,
                    targetSpec.SampleRate,
                    sourceSpec.ChannelLayout,
                    sourceSpec.Format,
                    sourceSpec.SampleRate,
                    0,
                    null);

                RC.Current.Add(Scaler, $"109: {nameof(AudioComponent)}.{nameof(MaterializeFrame)}()");
                ffmpeg.swr_init(Scaler);
                LastSourceSpec = sourceSpec;
            }

            // Allocate the unmanaged output buffer
            if (target.AudioBufferLength != targetSpec.BufferLength)
            {
                if (target.AudioBuffer != IntPtr.Zero)
                {
                    Marshal.FreeHGlobal(target.AudioBuffer);
                }

                target.AudioBufferLength = targetSpec.BufferLength;
                target.AudioBuffer       = Marshal.AllocHGlobal(targetSpec.BufferLength);
            }

            var outputBufferPtr = (byte *)target.AudioBuffer;

            // Execute the conversion (audio scaling). It will return the number of samples that were output
            var outputSamplesPerChannel =
                ffmpeg.swr_convert(
                    Scaler,
                    &outputBufferPtr,
                    targetSpec.SamplesPerChannel,
                    source.Pointer->extended_data,
                    source.Pointer->nb_samples);

            // Compute the buffer length
            var outputBufferLength =
                ffmpeg.av_samples_get_buffer_size(null, targetSpec.ChannelCount, outputSamplesPerChannel, targetSpec.Format, 1);

            // Flag the block if we have to
            target.IsStartTimeGuessed = source.HasValidStartTime == false;

            // Try to fix the start time, duration, and end time if we don't have valid data
            if (source.HasValidStartTime == false && siblings != null && siblings.Count > 0)
            {
                // Get timing information from the last sibling
                var lastSibling = siblings[siblings.Count - 1];

                // We set the target properties
                target.StartTime = lastSibling.EndTime;
                target.Duration  = source.Duration.Ticks > 0 ? source.Duration : lastSibling.Duration;
                target.EndTime   = TimeSpan.FromTicks(target.StartTime.Ticks + target.Duration.Ticks);
            }
            else
            {
                // We set the target properties directly from the source
                target.StartTime = source.StartTime;
                target.Duration  = source.Duration;
                target.EndTime   = source.EndTime;
            }

            target.BufferLength = outputBufferLength;
            target.ChannelCount = targetSpec.ChannelCount;

            target.SampleRate        = targetSpec.SampleRate;
            target.SamplesPerChannel = outputSamplesPerChannel;
            target.StreamIndex       = input.StreamIndex;

            return target;
        }
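
The buffer-length computation above reduces to simple arithmetic for interleaved output with an alignment of 1. A hedged sketch reusing the snippet's names; av_get_bytes_per_sample is a standard FFmpeg call, and planar formats are laid out per plane, so prefer av_samples_get_buffer_size in real code.

        // Sketch: for packed (interleaved) formats with align == 1,
        // av_samples_get_buffer_size reduces to samples * channels * bytes-per-sample.
        var bytesPerSample = ffmpeg.av_get_bytes_per_sample(targetSpec.Format);
        var expectedLength = outputSamplesPerChannel * targetSpec.ChannelCount * bytesPerSample;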
Example #3
 /// <summary>
 /// Converts decoded, raw frame data in the frame source into a usable frame. <br />
 /// The process includes performing picture, sample, or text conversions
 /// so that the decoded source frame data is easily usable in multimedia applications.
 /// </summary>
 /// <param name="input">The source frame to use as an input.</param>
 /// <param name="output">The target frame that will be updated with the source frame. If null is passed the frame will be instantiated.</param>
 /// <param name="siblings">The sibling blocks that may help guess some additional parameters for the input frame.</param>
 /// <returns>
 /// Returns true if the operation succeeded; false otherwise.
 /// </returns>
 public abstract bool MaterializeFrame(MediaFrame input, ref MediaBlock output, List <MediaBlock> siblings);
        /// <summary>
        /// Converts decoded, raw frame data in the frame source into a usable frame. <br />
        /// The process includes performing picture, sample, or text conversions
        /// so that the decoded source frame data is easily usable in multimedia applications.
        /// </summary>
        /// <param name="input">The source frame to use as an input.</param>
        /// <param name="output">The target frame that will be updated with the source frame. If null is passed the frame will be instantiated.</param>
        /// <param name="siblings">The siblings to help guess additional frame parameters.</param>
        /// <returns>
        /// Returns the updated output frame.
        /// </returns>
        /// <exception cref="System.ArgumentNullException">input</exception>
        public override MediaBlock MaterializeFrame(MediaFrame input, ref MediaBlock output, List <MediaBlock> siblings)
        {
            if (output == null)
            {
                output = new VideoBlock();
            }
            var source = input as VideoFrame;
            var target = output as VideoBlock;

            if (source == null || target == null)
            {
                throw new ArgumentNullException($"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");
            }

            // Retrieve a suitable scaler or create it on the fly
            Scaler = ffmpeg.sws_getCachedContext(
                Scaler,
                source.Pointer->width,
                source.Pointer->height,
                NormalizePixelFormat(source.Pointer),
                source.Pointer->width,
                source.Pointer->height,
                OutputPixelFormat,
                ScalerFlags,
                null,
                null,
                null);
            RC.Current.Add(Scaler, $"311: {nameof(VideoComponent)}.{nameof(MaterializeFrame)}()");

            // Perform scaling and save the data to our unmanaged buffer pointer
            var targetBufferStride = ffmpeg.av_image_get_linesize(OutputPixelFormat, source.Pointer->width, 0);
            var targetStride       = new int[] { targetBufferStride };
            var targetLength       = ffmpeg.av_image_get_buffer_size(OutputPixelFormat, source.Pointer->width, source.Pointer->height, 1);

            // Ensure proper allocation of the buffer
            // If there is a size mismatch between the wanted buffer length and the existing one,
            // then let's reallocate the buffer and set the new size (dispose of the existing one if any)
            if (target.PictureBufferLength != targetLength)
            {
                if (target.PictureBuffer != IntPtr.Zero)
                {
                    Marshal.FreeHGlobal(target.PictureBuffer);
                }

                target.PictureBufferLength = targetLength;
                target.PictureBuffer       = Marshal.AllocHGlobal(target.PictureBufferLength);
            }

            var targetScan = default(byte_ptrArray8);

            targetScan[0] = (byte *)target.PictureBuffer;

            // The scaling is done here
            var outputHeight = ffmpeg.sws_scale(Scaler, source.Pointer->data, source.Pointer->linesize, 0, source.Pointer->height, targetScan, targetStride);

            // Flag the block if we have to
            target.IsStartTimeGuessed = source.HasValidStartTime == false;

            // Try to fix the start time, duration, and end time if we don't have valid data
            if (source.HasValidStartTime == false && siblings != null && siblings.Count > 0)
            {
                // Get timing information from the last sibling
                var lastSibling = siblings[siblings.Count - 1];

                // We set the target properties
                target.StartTime = lastSibling.EndTime;
                target.Duration  = source.Duration.Ticks > 0 ? source.Duration : lastSibling.Duration;
                target.EndTime   = TimeSpan.FromTicks(target.StartTime.Ticks + target.Duration.Ticks);
            }
            else
            {
                // We set the target properties directly from the source
                target.StartTime = source.StartTime;
                target.Duration  = source.Duration;
                target.EndTime   = source.EndTime;
            }

            target.StreamIndex          = input.StreamIndex;
            target.SmtpeTimecode        = source.SmtpeTimecode;
            target.DisplayPictureNumber = source.DisplayPictureNumber;
            target.CodedPictureNumber   = source.CodedPictureNumber;
            target.BufferStride         = targetStride[0];

            target.PixelHeight = source.Pointer->height;
            target.PixelWidth  = source.Pointer->width;

            var aspectRatio = source.Pointer->sample_aspect_ratio;

            if (aspectRatio.num == 0 || aspectRatio.den == 0)
            {
                target.AspectWidth  = 1;
                target.AspectHeight = 1;
            }
            else
            {
                target.AspectWidth  = aspectRatio.num;
                target.AspectHeight = aspectRatio.den;
            }

            return target;
        }
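
A note on the stride values above: av_image_get_linesize returns the byte width of one output row, which for a four-byte-per-pixel format is the pixel width times four plus any alignment padding. A hedged sketch of addressing one row of the materialized buffer; the row index is hypothetical.

        // Sketch: locate a given row inside the unmanaged picture buffer.
        var r         = 0;                                            // hypothetical row index
        var rowOffset = r * targetBufferStride;                       // bytes from the buffer start
        var rowPtr    = IntPtr.Add(target.PictureBuffer, rowOffset);  // first byte of row r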
Example #5
 /// <summary>
 /// Converts decoded, raw frame data in the frame source into a usable frame. <br />
 /// The process includes performing picture, sample, or text conversions
 /// so that the decoded source frame data is easily usable in multimedia applications.
 /// </summary>
 /// <param name="input">The source frame to use as an input.</param>
 /// <param name="output">The target frame that will be updated with the source frame. If null is passed the frame will be instantiated.</param>
 /// <returns>Returns the updated output frame.</returns>
 internal abstract MediaBlock MaterializeFrame(MediaFrame input, ref MediaBlock output);
Example #6
        /// <summary>
        /// Converts decoded, raw frame data in the frame source into a usable frame. <br />
        /// The process includes performing picture, sample, or text conversions
        /// so that the decoded source frame data is easily usable in multimedia applications.
        /// </summary>
        /// <param name="input">The source frame to use as an input.</param>
        /// <param name="output">The target frame that will be updated with the source frame. If null is passed the frame will be instantiated.</param>
        /// <returns>
        /// Returns the updated output frame.
        /// </returns>
        /// <exception cref="System.ArgumentNullException">input</exception>
        internal override MediaBlock MaterializeFrame(MediaFrame input, ref MediaBlock output)
        {
            if (output == null)
            {
                output = new VideoBlock();
            }
            var source = input as VideoFrame;
            var target = output as VideoBlock;

            if (source == null || target == null)
            {
                throw new ArgumentNullException($"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");
            }

            // Retrieve a suitable scaler or create it on the fly
            Scaler = ffmpeg.sws_getCachedContext(Scaler,
                                                 source.Pointer->width, source.Pointer->height, GetPixelFormat(source.Pointer),
                                                 source.Pointer->width, source.Pointer->height,
                                                 OutputPixelFormat, ScalerFlags, null, null, null);
            RC.Current.Add(Scaler, $"311: {nameof(VideoComponent)}.{nameof(MaterializeFrame)}()");

            // Perform scaling and save the data to our unmanaged buffer pointer
            var targetBufferStride = ffmpeg.av_image_get_linesize(OutputPixelFormat, source.Pointer->width, 0);
            var targetStride       = new int[] { targetBufferStride };
            var targetLength       = ffmpeg.av_image_get_buffer_size(OutputPixelFormat, source.Pointer->width, source.Pointer->height, 1);

            // Ensure proper allocation of the buffer
            // If there is a size mismatch between the wanted buffer length and the existing one,
            // then let's reallocate the buffer and set the new size (dispose of the existing one if any)
            if (target.PictureBufferLength != targetLength)
            {
                if (target.PictureBuffer != IntPtr.Zero)
                {
                    Marshal.FreeHGlobal(target.PictureBuffer);
                }

                target.PictureBufferLength = targetLength;
                target.PictureBuffer       = Marshal.AllocHGlobal(target.PictureBufferLength);
            }

            var targetScan = new byte_ptrArray8();

            targetScan[0] = (byte *)target.PictureBuffer;

            // The scaling is done here
            var outputHeight = ffmpeg.sws_scale(Scaler, source.Pointer->data, source.Pointer->linesize, 0, source.Pointer->height, targetScan, targetStride);

            // We set the target properties
            target.EndTime      = source.EndTime;
            target.StartTime    = source.StartTime;
            target.BufferStride = targetStride[0];
            target.Duration     = source.Duration;
            target.PixelHeight  = source.Pointer->height;
            target.PixelWidth   = source.Pointer->width;

            var aspectRatio = source.Pointer->sample_aspect_ratio;

            if (aspectRatio.num == 0 || aspectRatio.den == 0)
            {
                target.AspectWidth  = 1;
                target.AspectHeight = 1;
            }
            else
            {
                target.AspectWidth  = aspectRatio.num;
                target.AspectHeight = aspectRatio.den;
            }

            return target;
        }
Example #7
        /// <summary>
        /// Converts decoded, raw frame data in the frame source into a usable frame. <br />
        /// The process includes performing picture, sample, or text conversions
        /// so that the decoded source frame data is easily usable in multimedia applications.
        /// </summary>
        /// <param name="input">The source frame to use as an input.</param>
        /// <param name="output">The target frame that will be updated with the source frame. If null is passed the frame will be instantiated.</param>
        /// <param name="siblings">The sibling blocks that may help guess some additional parameters for the input frame.</param>
        /// <returns>
        /// Returns true if successful; false otherwise.
        /// </returns>
        /// <exception cref="ArgumentNullException">input cannot be null</exception>
        public override bool MaterializeFrame(MediaFrame input, ref MediaBlock output, List <MediaBlock> siblings)
        {
            if (output == null)
            {
                output = new SubtitleBlock();
            }
            var source = input as SubtitleFrame;
            var target = output as SubtitleBlock;

            if (source == null || target == null)
            {
                throw new ArgumentNullException($"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");
            }

            // Set the target data
            target.EndTime     = source.EndTime;
            target.StartTime   = source.StartTime;
            target.Duration    = source.Duration;
            target.StreamIndex = input.StreamIndex;

            // Process time offsets
            if (Delay != TimeSpan.Zero)
            {
                target.StartTime = TimeSpan.FromTicks(target.StartTime.Ticks + Delay.Ticks);
                target.EndTime   = TimeSpan.FromTicks(target.EndTime.Ticks + Delay.Ticks);
                target.Duration  = TimeSpan.FromTicks(target.EndTime.Ticks - target.StartTime.Ticks);
            }

            target.OriginalText.Clear();
            if (source.Text.Count > 0)
            {
                target.OriginalText.AddRange(source.Text);
            }
            target.OriginalTextType = source.TextType;

            target.Text.Clear();
            foreach (var text in source.Text)
            {
                if (string.IsNullOrWhiteSpace(text))
                {
                    continue;
                }

                if (source.TextType == AVSubtitleType.SUBTITLE_ASS)
                {
                    var strippedText = StripAssFormat(text);
                    if (string.IsNullOrWhiteSpace(strippedText) == false)
                    {
                        target.Text.Add(strippedText);
                    }
                }
                else
                {
                    var strippedText = StripSrtFormat(text);
                    if (string.IsNullOrWhiteSpace(strippedText) == false)
                    {
                        target.Text.Add(strippedText);
                    }
                }
            }

            // TODO: CompressedSize is just an estimate.
            // It would be better if we counted chars in all text lines.
            target.CompressedSize = source.CompressedSize;

            return true;
        }
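
StripAssFormat and StripSrtFormat are not shown above. A hedged sketch of the kind of cleanup an ASS stripper performs; field layouts vary across FFmpeg versions, so this is illustrative only, not the project's actual helper.

        private static string StripAssTagsSketch(string text)
        {
            // Remove {\...} override tags, e.g. {\b1}bold{\b0} -> bold, and
            // convert ASS hard/soft line breaks (\N, \n) into real newlines.
            var stripped = System.Text.RegularExpressions.Regex.Replace(text, @"\{[^}]*\}", string.Empty);
            return stripped.Replace("\\N", "\n").Replace("\\n", "\n").Trim();
        }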
        /// <inheritdoc />
        public override bool MaterializeFrame(MediaFrame input, ref MediaBlock output, MediaBlock previousBlock)
        {
            if (output == null)
            {
                output = new VideoBlock();
            }
            if (input is VideoFrame == false || output is VideoBlock == false)
            {
                throw new ArgumentNullException($"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");
            }

            var source = (VideoFrame)input;
            var target = (VideoBlock)output;

            // Retrieve a suitable scaler or create it on the fly
            var newScaler = ffmpeg.sws_getCachedContext(
                Scaler,
                source.Pointer->width,
                source.Pointer->height,
                NormalizePixelFormat(source.Pointer),
                source.Pointer->width,
                source.Pointer->height,
                Constants.Video.VideoPixelFormat,
                ScalerFlags,
                null,
                null,
                null);

            // If this is the first time we set the scaler, simply assign it.
            if (Scaler == null)
            {
                Scaler = newScaler;
                RC.Current.Add(Scaler);
            }

            // Reassign to the new scaler and remove the reference to the existing one.
            // sws_getCachedContext automatically frees the existing scaler when it allocates a new one.
            if (Scaler != newScaler)
            {
                RC.Current.Remove(Scaler);
                Scaler = newScaler;
            }

            // Perform scaling and save the data to our unmanaged buffer pointer
            if (target.Allocate(source, Constants.Video.VideoPixelFormat) &&
                target.TryAcquireWriterLock(out var writeLock))
            {
                using (writeLock)
                {
                    var targetStride = new[] { target.PictureBufferStride };
                    var targetScan   = default(byte_ptrArray8);
                    targetScan[0] = (byte *)target.Buffer;

                    // The scaling is done here
                    var outputHeight = ffmpeg.sws_scale(
                        Scaler,
                        source.Pointer->data,
                        source.Pointer->linesize,
                        0,
                        source.Pointer->height,
                        targetScan,
                        targetStride);

                    if (outputHeight <= 0)
                    {
                        return false;
                    }
                }
            }
            else
            {
                return false;
            }

            // After scaling, we need to copy and guess some of the block properties
            // Flag the block if we have to
            target.IsStartTimeGuessed = source.HasValidStartTime == false;

            // Try to fix the start time, duration, and end time if we don't have valid data
            if (source.HasValidStartTime == false && previousBlock != null)
            {
                // Get timing information from the previous block
                target.StartTime = TimeSpan.FromTicks(previousBlock.EndTime.Ticks + 1);
                target.Duration  = source.Duration.Ticks > 0 ? source.Duration : previousBlock.Duration;
                target.EndTime   = TimeSpan.FromTicks(target.StartTime.Ticks + target.Duration.Ticks);

                // Guess the picture number and SMPTE timecode
                var timeBase = ffmpeg.av_guess_frame_rate(Container.InputContext, Stream, source.Pointer);
                target.DisplayPictureNumber = Extensions.ComputePictureNumber(target.StartTime, target.Duration, 1);
                target.SmtpeTimeCode        = Extensions.ComputeSmtpeTimeCode(StartTime, target.Duration, timeBase, target.DisplayPictureNumber);
            }
            else
            {
                // We set the target properties directly from the source
                target.StartTime = source.StartTime;
                target.Duration  = source.Duration;
                target.EndTime   = source.EndTime;

                // Copy the picture number and SMPTE timecode
                target.DisplayPictureNumber = source.DisplayPictureNumber;
                target.SmtpeTimeCode        = source.SmtpeTimeCode;
            }

            // Fill out other properties
            target.IsHardwareFrame         = source.IsHardwareFrame;
            target.HardwareAcceleratorName = source.HardwareAcceleratorName;
            target.CompressedSize          = source.CompressedSize;
            target.CodedPictureNumber      = source.CodedPictureNumber;
            target.StreamIndex             = source.StreamIndex;
            target.ClosedCaptions          = new ReadOnlyCollection <ClosedCaptionPacket>(source.ClosedCaptions);

            // Update the stream info object if we get Closed Caption Data
            if (StreamInfo.HasClosedCaptions == false && target.ClosedCaptions.Count > 0)
            {
                StreamInfo.HasClosedCaptions = true;
            }

            // Process the aspect ratio
            var aspectRatio = ffmpeg.av_guess_sample_aspect_ratio(Container.InputContext, Stream, source.Pointer);

            if (aspectRatio.num == 0 || aspectRatio.den == 0)
            {
                target.PixelAspectWidth  = 1;
                target.PixelAspectHeight = 1;
            }
            else
            {
                target.PixelAspectWidth  = aspectRatio.num;
                target.PixelAspectHeight = aspectRatio.den;
            }

            return true;
        }
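
The picture-number guess above reduces to tick arithmetic: frames elapsed equals the elapsed time divided by the duration of one frame. A hedged sketch of the idea, not the project's actual Extensions implementation.

        // Sketch: estimate a 1-based picture number from a start time and the
        // per-frame duration, guarding against a zero-length frame duration.
        private static long GuessPictureNumber(TimeSpan startTime, TimeSpan frameDuration)
        {
            if (frameDuration.Ticks <= 0) return 1;
            return 1 + (startTime.Ticks / frameDuration.Ticks);
        }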
Example #9
        /// <inheritdoc />
        public override bool MaterializeFrame(MediaFrame input, ref MediaBlock output, MediaBlock previousBlock)
        {
            if (output == null)
            {
                output = new AudioBlock();
            }
            if (input is AudioFrame == false || output is AudioBlock == false)
            {
                throw new ArgumentNullException($"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");
            }

            var source = (AudioFrame)input;
            var target = (AudioBlock)output;

            // Create the source and target audio specs. We might need to scale from
            // the source to the target
            var sourceSpec = FFAudioParams.CreateSource(source.Pointer);
            var targetSpec = FFAudioParams.CreateTarget(source.Pointer);

            // Initialize or update the audio scaler if required
            if (Scaler == null || LastSourceSpec == null || FFAudioParams.AreCompatible(LastSourceSpec, sourceSpec) == false)
            {
                Scaler = ffmpeg.swr_alloc_set_opts(
                    Scaler,
                    targetSpec.ChannelLayout,
                    targetSpec.Format,
                    targetSpec.SampleRate,
                    sourceSpec.ChannelLayout,
                    sourceSpec.Format,
                    sourceSpec.SampleRate,
                    0,
                    null);

                RC.Current.Add(Scaler);
                ffmpeg.swr_init(Scaler);
                LastSourceSpec = sourceSpec;
            }

            // Allocate the unmanaged output buffer and convert to stereo.
            int outputSamplesPerChannel;

            if (target.Allocate(targetSpec.BufferLength) &&
                target.TryAcquireWriterLock(out var writeLock))
            {
                using (writeLock)
                {
                    var outputBufferPtr = (byte *)target.Buffer;

                    // Execute the conversion (audio scaling). It will return the number of samples that were output
                    outputSamplesPerChannel = ffmpeg.swr_convert(
                        Scaler,
                        &outputBufferPtr,
                        targetSpec.SamplesPerChannel,
                        source.Pointer->extended_data,
                        source.Pointer->nb_samples);
                }
            }
            else
            {
                return false;
            }

            // Compute the buffer length
            var outputBufferLength =
                ffmpeg.av_samples_get_buffer_size(null, targetSpec.ChannelCount, outputSamplesPerChannel, targetSpec.Format, 1);

            // Flag the block if we have to
            target.IsStartTimeGuessed = source.HasValidStartTime == false;

            // Try to fix the start time, duration, and end time if we don't have valid data
            if (source.HasValidStartTime == false && previousBlock != null)
            {
                // Get timing information from the previous block
                target.StartTime = TimeSpan.FromTicks(previousBlock.EndTime.Ticks + 1);
                target.Duration  = source.Duration.Ticks > 0 ? source.Duration : previousBlock.Duration;
                target.EndTime   = TimeSpan.FromTicks(target.StartTime.Ticks + target.Duration.Ticks);
            }
            else
            {
                // We set the target properties directly from the source
                target.StartTime = source.StartTime;
                target.Duration  = source.Duration;
                target.EndTime   = source.EndTime;
            }

            target.CompressedSize      = source.CompressedSize;
            target.SamplesBufferLength = outputBufferLength;
            target.ChannelCount        = targetSpec.ChannelCount;

            target.SampleRate        = targetSpec.SampleRate;
            target.SamplesPerChannel = outputSamplesPerChannel;
            target.StreamIndex       = input.StreamIndex;

            return true;
        }
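
One caveat the snippet glosses over: when source and target sample rates differ, swr_convert may buffer samples internally. A hedged sketch of draining that remainder, reusing the snippet's names outside their original scope, so illustrative only.

        // Sketch: a null input with an input count of 0 asks swr_convert to
        // flush whatever samples the resampler is still holding.
        var drained = ffmpeg.swr_convert(Scaler, &outputBufferPtr, targetSpec.SamplesPerChannel, null, 0);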
        /// <summary>
        /// Converts decoded, raw frame data in the frame source into a usable frame. <br />
        /// The process includes performing picture, sample, or text conversions
        /// so that the decoded source frame data is easily usable in multimedia applications.
        /// </summary>
        /// <param name="input">The source frame to use as an input.</param>
        /// <param name="output">The target frame that will be updated with the source frame. If null is passed the frame will be instantiated.</param>
        /// <param name="siblings">The siblings to help guess additional frame parameters.</param>
        /// <returns>
        /// Returns the updated output frame.
        /// </returns>
        /// <exception cref="ArgumentNullException">input</exception>
        public override MediaBlock MaterializeFrame(MediaFrame input, ref MediaBlock output, List <MediaBlock> siblings)
        {
            if (output == null)
            {
                output = new VideoBlock();
            }
            var source = input as VideoFrame;
            var target = output as VideoBlock;

            if (source == null || target == null)
            {
                throw new ArgumentNullException($"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");
            }

            // Retrieve a suitable scaler or create it on the fly
            var newScaler = ffmpeg.sws_getCachedContext(
                Scaler,
                source.Pointer->width,
                source.Pointer->height,
                NormalizePixelFormat(source.Pointer),
                source.Pointer->width,
                source.Pointer->height,
                Constants.Video.VideoPixelFormat,
                ScalerFlags,
                null,
                null,
                null);

            // If this is the first time we set the scaler, simply assign it.
            if (Scaler == null)
            {
                Scaler = newScaler;
                RC.Current.Add(Scaler, $"311: {nameof(VideoComponent)}.{nameof(MaterializeFrame)}()");
            }

            // Reassign to the new scaler and remove the reference to the existing one.
            // sws_getCachedContext automatically frees the existing scaler when it allocates a new one.
            if (Scaler != newScaler)
            {
                RC.Current.Remove(Scaler);
                Scaler = newScaler;
            }

            // Perform scaling and save the data to our unmanaged buffer pointer
            target.EnsureAllocated(source, Constants.Video.VideoPixelFormat);
            var targetStride = new int[] { target.PictureBufferStride };
            var targetScan   = default(byte_ptrArray8);

            targetScan[0] = (byte *)target.PictureBuffer;

            // The scaling is done here
            var outputHeight = ffmpeg.sws_scale(
                Scaler,
                source.Pointer->data,
                source.Pointer->linesize,
                0,
                source.Pointer->height,
                targetScan,
                targetStride);

            // After scaling, we need to copy and guess some of the block properties
            // Flag the block if we have to
            target.IsStartTimeGuessed = source.HasValidStartTime == false;

            // Try to fix the start time, duration, and end time if we don't have valid data
            if (source.HasValidStartTime == false && siblings != null && siblings.Count > 0)
            {
                // Get timing information from the last sibling
                var lastSibling = siblings[siblings.Count - 1];

                // We set the target properties
                target.StartTime = lastSibling.EndTime;
                target.Duration  = source.Duration.Ticks > 0 ? source.Duration : lastSibling.Duration;
                target.EndTime   = TimeSpan.FromTicks(target.StartTime.Ticks + target.Duration.Ticks);

                // Guess the picture number and SMPTE timecode
                var timeBase = ffmpeg.av_guess_frame_rate(Container.InputContext, Stream, source.Pointer);
                target.DisplayPictureNumber = Extensions.ComputePictureNumber(target.StartTime, target.Duration, 1);
                target.SmtpeTimecode        = Extensions.ComputeSmtpeTimeCode(StartTimeOffset, target.Duration, timeBase, target.DisplayPictureNumber);
            }
            else
            {
                // We set the target properties directly from the source
                target.StartTime = source.StartTime;
                target.Duration  = source.Duration;
                target.EndTime   = source.EndTime;

                // Copy the picture number and SMPTE timecode
                target.DisplayPictureNumber = source.DisplayPictureNumber;
                target.SmtpeTimecode        = source.SmtpeTimecode;
            }

            // Fill out other properties
            target.CodedPictureNumber = source.CodedPictureNumber;
            target.StreamIndex        = source.StreamIndex;
            target.ClosedCaptions     = new ReadOnlyCollection <ClosedCaptions.ClosedCaptionPacket>(source.ClosedCaptions);

            // Process the aspect ratio
            var aspectRatio = source.Pointer->sample_aspect_ratio;

            if (aspectRatio.num == 0 || aspectRatio.den == 0)
            {
                target.AspectWidth  = 1;
                target.AspectHeight = 1;
            }
            else
            {
                target.AspectWidth  = aspectRatio.num;
                target.AspectHeight = aspectRatio.den;
            }

            return target;
        }
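
For reference, sample_aspect_ratio is the pixel aspect ratio, not the display aspect ratio, so a renderer derives the display size from it. A hedged sketch using the block properties set above.

        // Sketch: widen (or narrow) the pixel width by the pixel aspect ratio
        // to obtain the intended display width; the height is used as-is.
        var displayWidth  = target.PixelWidth * target.AspectWidth / (double)target.AspectHeight;
        var displayHeight = (double)target.PixelHeight;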
Example #11
        /// <summary>
        /// Converts decoded, raw frame data in the frame source into a usable frame. <br />
        /// The process includes performing picture, sample, or text conversions
        /// so that the decoded source frame data is easily usable in multimedia applications.
        /// </summary>
        /// <param name="input">The source frame to use as an input.</param>
        /// <param name="output">The target frame that will be updated with the source frame. If null is passed the frame will be instantiated.</param>
        /// <returns>
        /// Returns the updated output frame.
        /// </returns>
        /// <exception cref="System.ArgumentNullException">input</exception>
        internal override MediaBlock MaterializeFrame(MediaFrame input, ref MediaBlock output)
        {
            if (output == null)
            {
                output = new AudioBlock();
            }
            var source = input as AudioFrame;
            var target = output as AudioBlock;

            if (source == null || target == null)
            {
                throw new ArgumentNullException($"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");
            }

            // Create the source and target audio specs. We might need to scale from
            // the source to the target.
            var sourceSpec = AudioParams.CreateSource(source.Pointer);
            var targetSpec = AudioParams.CreateTarget(source.Pointer);

            // Initialize or update the audio scaler if required
            if (Scaler == null || LastSourceSpec == null || AudioParams.AreCompatible(LastSourceSpec, sourceSpec) == false)
            {
                Scaler = ffmpeg.swr_alloc_set_opts(Scaler, targetSpec.ChannelLayout, targetSpec.Format, targetSpec.SampleRate,
                                                   sourceSpec.ChannelLayout, sourceSpec.Format, sourceSpec.SampleRate, 0, null);

                RC.Current.Add(Scaler, $"109: {nameof(AudioComponent)}.{nameof(MaterializeFrame)}()");
                ffmpeg.swr_init(Scaler);
                LastSourceSpec = sourceSpec;
            }

            // Allocate the unmanaged output buffer
            if (target.AudioBufferLength != targetSpec.BufferLength)
            {
                if (target.AudioBuffer != IntPtr.Zero)
                {
                    Marshal.FreeHGlobal(target.AudioBuffer);
                }

                target.AudioBufferLength = targetSpec.BufferLength;
                target.AudioBuffer       = Marshal.AllocHGlobal(targetSpec.BufferLength);
            }

            var outputBufferPtr = (byte *)target.AudioBuffer;

            // Execute the conversion (audio scaling). It will return the number of samples that were output
            var outputSamplesPerChannel =
                ffmpeg.swr_convert(Scaler, &outputBufferPtr, targetSpec.SamplesPerChannel,
                                   source.Pointer->extended_data, source.Pointer->nb_samples);

            // Compute the buffer length
            var outputBufferLength =
                ffmpeg.av_samples_get_buffer_size(null, targetSpec.ChannelCount, outputSamplesPerChannel, targetSpec.Format, 1);

            // Set the target properties
            target.StartTime         = source.StartTime;
            target.EndTime           = source.EndTime;
            target.BufferLength      = outputBufferLength;
            target.ChannelCount      = targetSpec.ChannelCount;
            target.Duration          = source.Duration;
            target.SampleRate        = targetSpec.SampleRate;
            target.SamplesPerChannel = outputSamplesPerChannel;

            return target;
        }
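
A closing note on timing: even when a frame carries no usable timestamps, an audio block's duration follows from its sample count and rate (duration = samplesPerChannel / sampleRate). A hedged sketch with the snippet's names; none of the examples above compute it this way.

        // Sketch: derive the block duration from the converted sample count.
        var guessedDuration = TimeSpan.FromTicks(
            (long)(TimeSpan.TicksPerSecond * (double)outputSamplesPerChannel / targetSpec.SampleRate));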