Example #1
        public bool SeekToTime([NotNull] FFmpegStream stream, long timestamp)
        {
            FFmpegUtils.EnsurePlatformSupport();

            var originalSkipToKeyframe = stream.AVStream->skip_to_keyframe;

            try
            {
                if (currentStreams.TryGetValue(stream, out var streamInfo))
                {
                    // flush the images buffered by the codec
                    streamInfo.Codec.Flush(pDecodedFrame);
                }

                // flush the packets buffered by the format context
                ffmpeg.avformat_flush(AVFormatContext);

                // perform the actual seek, forcing the demuxer to skip frames until the next keyframe
                stream.AVStream->skip_to_keyframe = 1;
                var ret = ffmpeg.av_seek_frame(AVFormatContext, stream.Index, timestamp, ffmpeg.AVSEEK_FLAG_BACKWARD);
                if (ret < 0)
                {
                    Logger.Error($"Could not seek frame. Error code={ret.ToString("X8")}");
                    Logger.Error(GetErrorMessage(ret));
                    return(false);
                }

                return(true);
            }
            finally
            {
                // restore the stream's original keyframe-skipping behavior
                stream.AVStream->skip_to_keyframe = originalSkipToKeyframe;
            }
        }
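A minimal usage sketch (hedged: the media and videoStream instances and the surrounding unsafe context are assumptions, not shown by the example). av_seek_frame expects the timestamp in the stream's time_base units, so a wall-clock position has to be rescaled first:

    // hypothetical caller; requires an unsafe context for the AVStream pointer access
    var target   = TimeSpan.FromSeconds(10);
    var timeBase = videoStream.AVStream->time_base;                           // AVRational (num/den)
    var pts      = (long)(target.TotalSeconds * timeBase.den / timeBase.num); // seconds -> stream units
    if (!media.SeekToTime(videoStream, pts))
    {
        // the seek failed; the media is left at its previous position
    }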
Example #2
        private StreamInfo GetStreamInfo([NotNull] FFmpegStream stream)
        {
            if (!currentStreams.TryGetValue(stream, out var streamInfo))
            {
                // local copy of the codec context; only its parameters are read to build the stream info
                var codecContext = *(stream.AVStream->codec);
                var width        = codecContext.width;
                var height       = codecContext.height;

                var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(DestinationPixelFormat, width, height, 1);
                convertedFrameBufferSize = (convertedFrameBufferSize + 3) & ~0x03; // round up to a 4-byte (32-bit) boundary

                currentStreams[stream] = streamInfo = new StreamInfo
                {
                    Codec = new FFmpegCodec(graphicsDevice, &codecContext),
                    Image = new VideoImage(width, height, convertedFrameBufferSize),
                };
            }

            return streamInfo;
        }
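The bitwise rounding above is the standard power-of-two alignment idiom; a standalone illustration (the helper name is ours, not part of the class):

    static int AlignUp4(int size) => (size + 3) & ~0x03;
    // AlignUp4(101) == 104, AlignUp4(104) == 104: adding 3 and clearing the two
    // low bits rounds any size up to the next multiple of 4 without branching.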
Example #3
        public int ExtractFrames([NotNull] FFmpegStream stream, int count)
        {
            FFmpegUtils.EnsurePlatformSupport();
            if (isDisposed)
            {
                throw new ObjectDisposedException(nameof(FFmpegMedia));
            }
            if (!IsOpen)
            {
                // TODO: log?
                throw new InvalidOperationException(@"Media isn't open.");
            }

            var codecContext = *stream.AVStream->codec;
            var streamInfo   = GetStreamInfo(stream);

            var dstData     = new byte_ptrArray4();
            var dstLinesize = new int_array4();

            // wrap the pre-allocated image buffer so converted frames can be written straight into it
            ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte *)streamInfo.Image.Buffer, DestinationPixelFormat, codecContext.width, codecContext.height, 1);
            streamInfo.Image.Linesize = dstLinesize[0];

            var extractedFrameCount = 0;

            var packet  = new AVPacket();
            var pPacket = &packet;

            ffmpeg.av_init_packet(pPacket);

            for (int i = 0; i < count; i++)
            {
                var extractionStatus = ExtractNextImage(streamInfo, pPacket, stream.AVStream, dstData, dstLinesize);
                streamInfo.ReachedEnd = extractionStatus == FrameExtractionStatus.ReachEOF; // remember whether the last read hit the end of the file
                if (extractionStatus == FrameExtractionStatus.Succeeded)
                {
                    ++extractedFrameCount;
                }
            }

            return extractedFrameCount;
        }
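A hedged decode-loop sketch (the media and videoStream instances are assumed): a caller typically pumps a fixed number of frames per update and treats a short count as a hint that the stream may have ended.

    const int FramesPerUpdate = 4; // hypothetical per-tick budget
    var decoded = media.ExtractFrames(videoStream, FramesPerUpdate);
    if (decoded < FramesPerUpdate)
    {
        // fewer frames than requested: decoding failed or the end of the stream was reached
    }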
Example #4
        /// <summary>
        /// Returns <c>true</c> if the provided stream is a stereoscopic video.
        /// </summary>
        /// <remarks>This function may read and decode the first frame of the video if necessary.</remarks>
        /// <param name="stream">The video stream to check.</param>
        /// <returns><c>true</c> if the stream is stereoscopic; otherwise, <c>false</c>.</returns>
        public bool IsStereoscopicVideo([NotNull] FFmpegStream stream)
        {
            // First, try to get the stereo 3D side data directly from the stream, if available.
            if (ffmpeg.av_stream_get_side_data(stream.AVStream, AVPacketSideDataType.AV_PKT_DATA_STEREO3D, null) != null)
            {
                return true;
            }

            // The side data was not present in the stream itself,
            // so we need to decode and inspect the first packet and frame.
            var streamInfo = GetStreamInfo(stream);
            var packet     = new AVPacket();
            var pPacket    = &packet;

            ffmpeg.av_init_packet(pPacket);
            var pCodecContext = streamInfo.Codec.pAVCodecContext;

            try
            {
                while (true)
                {
                    var ret = ffmpeg.av_read_frame(AVFormatContext, pPacket);
                    if (ret < 0)
                    {
                        if (ret == ffmpeg.AVERROR_EOF)
                        {
                            return false;
                        }

                        Logger.Error($"Could not read frame. Error code={ret:X8}");
                        Logger.Error(GetErrorMessage(ret));
                        return false;
                    }

                    // Note: the other streams might be audio (which we will want to process at some point)
                    if (pPacket->stream_index != stream.AVStream->index)
                    {
                        continue;
                    }

                    // check the side data on the packet
                    var packetSideData = ffmpeg.av_packet_get_side_data(pPacket, AVPacketSideDataType.AV_PKT_DATA_STEREO3D, null);
                    if (packetSideData != null)
                    {
                        return true;
                    }

                    ret = ffmpeg.avcodec_send_packet(pCodecContext, pPacket);
                    if (ret < 0)
                    {
                        Logger.Error($"Error while sending packet. Error code={ret.ToString("X8")}");
                        Logger.Error(GetErrorMessage(ret));
                        return(false);
                    }

                    ret = ffmpeg.avcodec_receive_frame(pCodecContext, pDecodedFrame);
                    if (ret == ffmpeg.AVERROR(ffmpeg.EAGAIN)) // the decoder needs more input before it can output a frame
                    {
                        Utilities.Sleep(5);
                        continue;
                    }
                    if (ret < 0)
                    {
                        Logger.Error($"Error while receiving frame. Error code={ret.ToString("X8")}");
                        Logger.Error(GetErrorMessage(ret));
                        return(false);
                    }

                    // check the side data on the frame
                    var frameSideData = ffmpeg.av_frame_get_side_data(pDecodedFrame, AVFrameSideDataType.AV_FRAME_DATA_STEREO3D);
                    if (frameSideData != null)
                    {
                        return true;
                    }

                    // Reaching this point means the first packet and frame were decoded but carry no stereo 3D side data.
                    return false;
                }
            }
            finally
            {
                ffmpeg.av_packet_unref(pPacket);
                ffmpeg.av_frame_unref(pDecodedFrame);

                // return to the beginning of the media file (just in case)
                SeekToTime(stream, 0);
            }
        }
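A minimal caller sketch (hypothetical names): since the check may decode a frame and then seeks back to the start, it is best done once, right after opening the media.

    bool stereo = media.IsStereoscopicVideo(videoStream);
    // e.g. pick a side-by-side/top-bottom presentation when stereo is true,
    // and the regular mono path otherwise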
Example #5
        /// <summary>
        /// Opens this media.
        /// </summary>
        /// <remarks>
        /// Once the media is open, the collection of <see cref="Streams"/> is populated.
        /// </remarks>
        /// <param name="url">The path or URL of the media to open.</param>
        /// <param name="startPosition">The byte offset at which to start reading, when opening only a section of the media.</param>
        /// <param name="length">The number of bytes to read from <paramref name="startPosition"/>, or -1 to read to the end.</param>
        public void Open(string url, long startPosition = 0, long length = -1)
        {
            FFmpegUtils.EnsurePlatformSupport();
            if (isDisposed)
            {
                throw new ObjectDisposedException(nameof(FFmpegMedia));
            }
            if (IsOpen)
            {
                // TODO: log?
                throw new InvalidOperationException(@"Media is already open.");
            }

            if (startPosition != 0 && length != -1)
            {
                // expose only a byte range of the file through FFmpeg's 'subfile' protocol
                url = $@"subfile,,start,{startPosition},end,{startPosition + length},,:{url}";
            }

            var pFormatContext = ffmpeg.avformat_alloc_context();
            var ret            = ffmpeg.avformat_open_input(&pFormatContext, url, null, null);

            if (ret < 0)
            {
                Logger.Error($"Could not open file. Error code={ret.ToString("X8")}");
                Logger.Error(GetErrorMessage(ret));
                throw new ApplicationException(@"Could not open file.");
            }

            ret = ffmpeg.avformat_find_stream_info(pFormatContext, null);
            if (ret < 0)
            {
                Logger.Error($"Could not find stream info. Error code={ret.ToString("X8")}");
                Logger.Error(GetErrorMessage(ret));
                throw new ApplicationException(@"Could not find stream info.");
            }

            AVFormatContext = pFormatContext;
            Duration        = TimeSpan.FromSeconds((double)AVFormatContext->duration / ffmpeg.AV_TIME_BASE);
            Url             = url;

            // Get the streams
            for (int i = 0; i < pFormatContext->nb_streams; i++)
            {
                var stream = FFmpegStream.Create(pFormatContext->streams[i], this);
                streams.Add(stream);
            }

            pDecodedFrame = ffmpeg.av_frame_alloc();
            if (pDecodedFrame == null)
            {
                throw new ApplicationException("Couldn't allocate a frame for decoding.");
            }

            pCpuCopyFrame = ffmpeg.av_frame_alloc();
            if (pCpuCopyFrame == null)
            {
                throw new ApplicationException("Couldn't allocate a frame for hardware decoding.");
            }

            // dispose cached video images from any previously opened file
            ClearStreamInfo();
        }
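A hedged open/inspect sketch (the construction and stream selection are assumptions, not shown by the source): passing a start position and length routes the url through FFmpeg's subfile protocol so only that byte range is read.

    var media = new FFmpegMedia();                    // hypothetical construction
    media.Open("videos/sample.mp4");                  // or Open(url, startPosition, length) for a byte range
    Console.WriteLine($"Duration: {media.Duration}"); // computed from the format context's duration
    foreach (var stream in media.Streams)             // populated by Open
    {
        // e.g. find the first video stream to play back
    }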