Example #1
        /// <summary>
        /// Converts decoded, raw frame data in the frame source into a usable frame. <br />
        /// The process includes performing picture, sample, or text conversions
        /// so that the decoded source frame data is easily usable in multimedia applications.
        /// </summary>
        /// <param name="input">The source frame to use as an input.</param>
        /// <param name="output">The target frame that will be updated with the source frame. If null is passed the frame will be instantiated.</param>
        /// <returns>
        /// The updated output frame.
        /// </returns>
        /// <exception cref="System.ArgumentNullException">input</exception>
        internal override MediaBlock MaterializeFrame(MediaFrame input, ref MediaBlock output)
        {
            if (output == null)
            {
                output = new VideoBlock();
            }
            var source = input as VideoFrame;
            var target = output as VideoBlock;

            if (source == null || target == null)
            {
                throw new ArgumentNullException($"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");
            }

            // Retrieve a suitable scaler or create it on the fly
            Scaler = ffmpeg.sws_getCachedContext(Scaler,
                                                 source.Pointer->width, source.Pointer->height, GetPixelFormat(source.Pointer),
                                                 source.Pointer->width, source.Pointer->height,
                                                 OutputPixelFormat, ScalerFlags, null, null, null);

            // Perform scaling and save the data to our unmanaged buffer pointer
            var targetBufferStride = ffmpeg.av_image_get_linesize(OutputPixelFormat, source.Pointer->width, 0);
            var targetStride       = new int[] { targetBufferStride };
            var targetLength       = ffmpeg.av_image_get_buffer_size(OutputPixelFormat, source.Pointer->width, source.Pointer->height, 1);

            // Ensure proper allocation of the buffer
            // If there is a size mismatch between the wanted buffer length and the existing one,
            // then let's reallocate the buffer and set the new size (dispose of the existing one if any)
            if (target.PictureBufferLength != targetLength)
            {
                if (target.PictureBuffer != IntPtr.Zero)
                {
                    Marshal.FreeHGlobal(target.PictureBuffer);
                }

                target.PictureBufferLength = targetLength;
                target.PictureBuffer       = Marshal.AllocHGlobal(target.PictureBufferLength);
            }

            var targetScan = new byte_ptrArray8();
            targetScan[0] = (byte*)target.PictureBuffer;

            // The scaling is done here
            var outputHeight = ffmpeg.sws_scale(Scaler, source.Pointer->data, source.Pointer->linesize, 0, source.Pointer->height, targetScan, targetStride);

            // We set the target properties
            target.EndTime      = source.EndTime;
            target.StartTime    = source.StartTime;
            target.BufferStride = targetStride[0];
            target.Duration     = source.Duration;
            target.PixelHeight  = source.Pointer->height;
            target.PixelWidth   = source.Pointer->width;

            return target;
        }
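A minimal caller-side sketch of how an override like this is typically driven. The frame source, the videoComponent instance, and the Render call are hypothetical placeholders, not names from the library; only the pass-the-previous-block-back-by-reference pattern comes from the method above.

        // Hypothetical usage sketch: reuse one block across frames so the picture
        // buffer is allocated once and recycled while the frame dimensions match.
        MediaBlock block = null;
        foreach (var frame in decodedVideoFrames)       // hypothetical frame source
        {
            block = videoComponent.MaterializeFrame(frame, ref block);
            Render((VideoBlock)block);                  // hypothetical consumer
        }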
Example #2
        /// <summary>
        /// Converts decoded, raw frame data in the frame source into a usable frame. <br />
        /// The process includes performing picture, sample, or text conversions
        /// so that the decoded source frame data is easily usable in multimedia applications.
        /// </summary>
        /// <param name="input">The source frame to use as an input.</param>
        /// <param name="output">The target frame that will be updated with the source frame. If null is passed the frame will be instantiated.</param>
        /// <returns>
        /// The updated output frame.
        /// </returns>
        /// <exception cref="System.ArgumentNullException">input</exception>
        internal override MediaBlock MaterializeFrame(MediaFrame input, ref MediaBlock output)
        {
            if (output == null)
            {
                output = new SubtitleBlock();
            }
            var source = input as SubtitleFrame;
            var target = output as SubtitleBlock;

            if (source == null || target == null)
            {
                throw new ArgumentNullException($"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");
            }

            // Set the target data
            target.EndTime   = source.EndTime;
            target.StartTime = source.StartTime;
            target.Duration  = source.Duration;
            target.Text.Clear();
            target.Text.AddRange(source.Text);

            return target;
        }
Example #3
        /// <summary>
        /// Adds a block to the playback blocks by converting the given frame.
        /// If there are no more blocks in the pool, the oldest block is returned to the pool
        /// and reused for the new block. The source frame is automatically disposed.
        /// </summary>
        /// <param name="source">The source.</param>
        /// <param name="container">The container.</param>
        public MediaBlock Add(MediaFrame source, MediaContainer container)
        {
            lock (SyncRoot)
            {
                // Check if we already have a block at the given time
                if (IsInRange(source.StartTime))
                {
                    var repeatedBlock = PlaybackBlocks.FirstOrDefault(f => f.StartTime.Ticks == source.StartTime.Ticks);
                    if (repeatedBlock != null)
                    {
                        PlaybackBlocks.Remove(repeatedBlock);
                        PoolBlocks.Enqueue(repeatedBlock);
                    }
                }

                // if there are no available blocks, make room!
                if (PoolBlocks.Count <= 0)
                {
                    var firstBlock = PlaybackBlocks[0];
                    PlaybackBlocks.RemoveAt(0);
                    PoolBlocks.Enqueue(firstBlock);
                }

                // Get a block reference from the pool and convert it!
                var targetBlock = PoolBlocks.Dequeue();
                {
                    var target = targetBlock as MediaBlock;
                    container.Convert(source, ref target, true);
                }

                // Add the converted block to the playback list and sort it.
                PlaybackBlocks.Add(targetBlock);
                PlaybackBlocks.Sort();
                return targetBlock;
            }
        }
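A hedged sketch of how Add is typically fed. The blockBuffer, mediaContainer, and DecodeNextFrame names are hypothetical; only the behavior that Add converts the frame, disposes it, and returns the pooled block comes from the code above.

        // Hypothetical usage sketch: convert each decoded frame into a pooled block.
        // Add() recycles the oldest block when the pool is empty and disposes the frame.
        var frame = DecodeNextFrame();                  // hypothetical decode step
        if (frame != null)
        {
            var block = blockBuffer.Add(frame, mediaContainer);
            // block now holds the materialized data and is part of PlaybackBlocks
        }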
Example #4
 /// <summary>
 /// Converts decoded, raw frame data in the frame source into a usable frame. <br />
 /// The process includes performing picture, sample, or text conversions
 /// so that the decoded source frame data is easily usable in multimedia applications.
 /// </summary>
 /// <param name="input">The source frame to use as an input.</param>
 /// <param name="output">The target frame that will be updated with the source frame. If null is passed the frame will be instantiated.</param>
 /// <returns>The updated output frame.</returns>
 internal abstract MediaBlock MaterializeFrame(MediaFrame input, ref MediaBlock output);
Example #5
        /// <summary>
        /// Converts decoded, raw frame data in the frame source into a usable frame. <br />
        /// The process includes performing picture, sample, or text conversions
        /// so that the decoded source frame data is easily usable in multimedia applications.
        /// </summary>
        /// <param name="input">The source frame to use as an input.</param>
        /// <param name="output">The target frame that will be updated with the source frame. If null is passed the frame will be instantiated.</param>
        /// <returns>
        /// The updated output frame.
        /// </returns>
        /// <exception cref="System.ArgumentNullException">input</exception>
        internal override MediaBlock MaterializeFrame(MediaFrame input, ref MediaBlock output)
        {
            if (output == null)
            {
                output = new AudioBlock();
            }
            var source = input as AudioFrame;
            var target = output as AudioBlock;

            if (source == null || target == null)
            {
                throw new ArgumentNullException($"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");
            }

            // Create the source and target audio specs. We might need to scale from
            // the source to the target
            var sourceSpec = AudioParams.CreateSource(source.Pointer);
            var targetSpec = AudioParams.CreateTarget(source.Pointer);

            // Initialize or update the audio scaler if required
            if (Scaler == null || LastSourceSpec == null || AudioParams.AreCompatible(LastSourceSpec, sourceSpec) == false)
            {
                Scaler = ffmpeg.swr_alloc_set_opts(Scaler, targetSpec.ChannelLayout, targetSpec.Format, targetSpec.SampleRate,
                                                   sourceSpec.ChannelLayout, sourceSpec.Format, sourceSpec.SampleRate, 0, null);

                ffmpeg.swr_init(Scaler);
                LastSourceSpec = sourceSpec;
            }

            // Allocate the unmanaged output buffer
            if (target.AudioBufferLength != targetSpec.BufferLength)
            {
                if (target.AudioBuffer != IntPtr.Zero)
                {
                    Marshal.FreeHGlobal(target.AudioBuffer);
                }

                target.AudioBufferLength = targetSpec.BufferLength;
                target.AudioBuffer       = Marshal.AllocHGlobal(targetSpec.BufferLength);
            }

            var outputBufferPtr = (byte*)target.AudioBuffer;

            // Execute the conversion (audio scaling). It will return the number of samples that were output
            var outputSamplesPerChannel =
                ffmpeg.swr_convert(Scaler, &outputBufferPtr, targetSpec.SamplesPerChannel,
                                   source.Pointer->extended_data, source.Pointer->nb_samples);

            // Compute the buffer length
            var outputBufferLength =
                ffmpeg.av_samples_get_buffer_size(null, targetSpec.ChannelCount, outputSamplesPerChannel, targetSpec.Format, 1);

            // set the target properties
            target.StartTime         = source.StartTime;
            target.EndTime           = source.EndTime;
            target.BufferLength      = outputBufferLength;
            target.ChannelCount      = targetSpec.ChannelCount;
            target.Duration          = source.Duration;
            target.SampleRate        = targetSpec.SampleRate;
            target.SamplesPerChannel = outputSamplesPerChannel;

            return target;
        }
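For intuition on the sizes involved, a small arithmetic sketch of what av_samples_get_buffer_size returns for packed (interleaved) formats with an alignment of 1; the concrete numbers are illustrative assumptions, not values taken from the method above.

        // Illustrative arithmetic only: packed buffer size = samples * channels * bytes per sample.
        const int samplesPerChannel = 1152;             // e.g. one MP3 frame (assumption)
        const int channelCount      = 2;                // stereo (assumption)
        const int bytesPerSample    = 2;                // AV_SAMPLE_FMT_S16
        var packedBufferLength = samplesPerChannel * channelCount * bytesPerSample;  // 4608 bytes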
Example #6
        /// <summary>
        /// Performs audio, video, and subtitle conversions on the decoded input frame so the data
        /// can be used as a Frame. Note that the output is passed by reference and
        /// works as follows: if the output reference is null, it is automatically instantiated
        /// and returned by this function. This lets you either instantiate or reuse a previously
        /// allocated Frame. This is important because buffer allocations are expensive operations,
        /// and this allows you to perform the allocation once and keep reusing the same buffer.
        /// </summary>
        /// <param name="input">The raw frame source. Has to be compatiable with the target. (e.g. use VideoFrameSource to conver to VideoFrame)</param>
        /// <param name="output">The target frame. Has to be compatible with the source.</param>
        /// <param name="releaseInput">if set to <c>true</c> releases the raw frame source from unmanaged memory.</param>
        /// <returns>The updated output block.</returns>
        /// <exception cref="System.ArgumentNullException">input</exception>
        /// <exception cref="System.ArgumentException">
        /// input
        /// or
        /// input
        /// </exception>
        /// <exception cref="MediaContainerException">MediaType</exception>
        public MediaBlock Convert(MediaFrame input, ref MediaBlock output, bool releaseInput = true)
        {
            lock (ConvertSyncRoot)
            {
                if (IsDisposed || InputContext == null)
                {
                    throw new InvalidOperationException("No input context initialized");
                }

                // Check the input parameters
                if (input == null)
                {
                    throw new ArgumentNullException($"{nameof(input)} cannot be null.");
                }

                try
                {
                    switch (input.MediaType)
                    {
                    case MediaType.Video:
                        if (input.IsStale)
                        {
                            throw new ArgumentException(
                                      $"The {nameof(input)} {nameof(MediaFrame)} has already been released (it's stale).");
                        }

                        if (Components.HasVideo)
                        {
                            Components.Video.MaterializeFrame(input, ref output);
                        }
                        return output;

                    case MediaType.Audio:
                        if (input.IsStale)
                        {
                            throw new ArgumentException(
                                      $"The {nameof(input)} {nameof(MediaFrame)} has already been released (it's stale).");
                        }

                        if (Components.HasAudio)
                        {
                            Components.Audio.MaterializeFrame(input, ref output);
                        }
                        return output;

                    case MediaType.Subtitle:
                        // We don't need to check if subtitles are stale because they are immediately released
                        // upon decoding. This is because there is no unmanaged allocator for AVSubtitle.

                        if (Components.HasSubtitles)
                        {
                            Components.Subtitles.MaterializeFrame(input, ref output);
                        }
                        return output;

                    default:
                        throw new MediaContainerException($"Unable to materialize {nameof(MediaType)} {(int)input.MediaType}");
                    }
                }
                finally
                {
                    if (releaseInput)
                    {
                        input.Dispose();
                    }
                }
            }
        }
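A hedged sketch of the releaseInput flag from a caller's point of view. The container and frame instances are hypothetical placeholders; only the fact that Convert disposes the frame in its finally block when releaseInput is true comes from the code above.

        // Hypothetical usage sketch: opt out of automatic release and dispose the
        // frame ourselves once the conversion attempt (successful or not) is done.
        MediaBlock block = null;
        try
        {
            block = container.Convert(frame, ref block, releaseInput: false);
        }
        finally
        {
            frame.Dispose();    // we skipped automatic release, so clean up here
        }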
Example #7
 /// <summary>
 /// Pushes the specified frame into the queue.
 /// In other words, enqueues the frame.
 /// </summary>
 /// <param name="frame">The frame.</param>
 public void Push(MediaFrame frame)
 {
     lock (SyncRoot)
         Frames.Add(frame);
 }
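The consumer side of the queue is not shown in this excerpt. Below is a minimal sketch of what a matching dequeue could look like under the same SyncRoot lock, assuming Frames is the backing List<MediaFrame> used by Push; the real class may expose a different API.

 /// <summary>
 /// Hedged sketch only: removes and returns the oldest frame, or null when the queue is empty.
 /// </summary>
 public MediaFrame Dequeue()
 {
     lock (SyncRoot)
     {
         if (Frames.Count == 0)
             return null;

         var frame = Frames[0];
         Frames.RemoveAt(0);
         return frame;
     }
 }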