Example #1
        /// <inheritdoc />
        public void Dispose()
        {
            if (isDisposed)
            {
                return;
            }

            isDisposed = true;

            // Remove all streams
            // Note: native streams are freed along the native context.
            streams.Clear();
            ClearStreamInfo();

            if (!IsOpen || !FFmpegUtils.CheckPlatformSupport())
            {
                return;
            }

            ffmpeg.av_free(pDecodedFrame);
            ffmpeg.av_free(pCpuCopyFrame);
            ffmpeg.sws_freeContext(pConvertContext);

            var pFormatContext = AVFormatContext;

            ffmpeg.avformat_close_input(&pFormatContext);
            ffmpeg.avformat_free_context(pFormatContext);
        }
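
A minimal consumer sketch for the Dispose pattern above, assuming FFmpegMedia implements IDisposable and exposes a parameterless constructor (neither is shown in this listing); the file path is a placeholder.

        // Hypothetical caller: the using block guarantees Dispose() runs,
        // clearing the managed stream list and freeing the native frames,
        // scaler context and format context as in Example #1.
        using (var media = new FFmpegMedia())
        {
            media.Open("video.mp4"); // placeholder path; Open is shown in Example #5
            // ... decode frames here ...
        } // Dispose() executes here
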
Example #2
        public bool SeekToTime([NotNull] FFmpegStream stream, long timestamp)
        {
            FFmpegUtils.EnsurePlatformSupport();

            var skip_to_keyframe = stream.AVStream->skip_to_keyframe;

            try
            {
                if (currentStreams.TryGetValue(stream, out var streamInfo))
                {
                    // flush the codec buffered images
                    streamInfo.Codec.Flush(pDecodedFrame);
                }

                // flush the format buffered images
                ffmpeg.avformat_flush(AVFormatContext);

                // perform the actual seek
                stream.AVStream->skip_to_keyframe = 1;
                var ret = ffmpeg.av_seek_frame(AVFormatContext, stream.Index, timestamp, ffmpeg.AVSEEK_FLAG_BACKWARD);
                if (ret < 0)
                {
                    Logger.Error($"Could not seek frame. Error code={ret.ToString("X8")}");
                    Logger.Error(GetErrorMessage(ret));
                    return false;
                }

                return true;
            }
            finally
            {
                stream.AVStream->skip_to_keyframe = skip_to_keyframe;
            }
        }
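
The timestamp passed to SeekToTime is in the stream's time_base units, which is what av_seek_frame expects when a stream index is supplied. Below is a small conversion sketch; the ToStreamTimestamp helper name is an illustrative assumption and not part of the source.

        // Hypothetical helper: convert a wall-clock position into the
        // stream's time_base units before calling SeekToTime above, e.g.
        // SeekToTime(stream, ToStreamTimestamp(stream, TimeSpan.FromSeconds(10))).
        private static unsafe long ToStreamTimestamp(FFmpegStream stream, TimeSpan position)
        {
            var timeBase = stream.AVStream->time_base; // units per second = den / num
            return (long)(position.TotalSeconds * timeBase.den / timeBase.num);
        }
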
Example #3
        /// <summary>
        /// Initializes a new instance of the <see cref="FFmpegStream"/> class.
        /// </summary>
        protected FFmpegStream([NotNull] AVStream* pStream, FFmpegMedia media)
        {
            if (pStream == null)
            {
                throw new ArgumentNullException(nameof(pStream));
            }

            AVStream = pStream;
            Codec = pStream->codec->codec_id;
            Media = media;
            Index = pStream->index;
            Metadata = FFmpegUtils.ToDictionary(pStream->metadata);
        }
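
Example #5 below calls FFmpegStream.Create to wrap each AVStream. A hedged sketch of what such a factory could look like, dispatching on the codec type; the VideoStream, AudioStream and UnsupportedStream types are illustrative assumptions and are not shown in the source.

        // Sketch of a Create factory (assumed shape; see Example #5 for the call site):
        // choose a wrapper type from the codec type reported by the AVStream.
        public static FFmpegStream Create(AVStream* pStream, FFmpegMedia media)
        {
            switch (pStream->codec->codec_type)
            {
                case AVMediaType.AVMEDIA_TYPE_VIDEO:
                    return new VideoStream(pStream, media);       // assumed derived type
                case AVMediaType.AVMEDIA_TYPE_AUDIO:
                    return new AudioStream(pStream, media);       // assumed derived type
                default:
                    return new UnsupportedStream(pStream, media); // assumed fallback type
            }
        }
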
Example #4
        public int ExtractFrames([NotNull] FFmpegStream stream, int count)
        {
            FFmpegUtils.EnsurePlatformSupport();
            if (isDisposed)
            {
                throw new ObjectDisposedException(nameof(FFmpegMedia));
            }
            if (!IsOpen)
            {
                // TODO: log?
                throw new InvalidOperationException(@"Media isn't open.");
            }

            var codecContext = *stream.AVStream->codec;
            var streamInfo   = GetStreamInfo(stream);

            var dstData     = new byte_ptrArray4();
            var dstLinesize = new int_array4();

            ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte*)streamInfo.Image.Buffer, DestinationPixelFormat, codecContext.width, codecContext.height, 1);
            streamInfo.Image.Linesize = dstLinesize[0];

            var extractedFrameCount = 0;

            var packet  = new AVPacket();
            var pPacket = &packet;

            ffmpeg.av_init_packet(pPacket);

            for (int i = 0; i < count; i++)
            {
                var extractionStatus = ExtractNextImage(streamInfo, pPacket, stream.AVStream, dstData, dstLinesize);
                streamInfo.ReachedEnd = extractionStatus == FrameExtractionStatus.ReachEOF;
                if (extractionStatus == FrameExtractionStatus.Succeeded)
                {
                    ++extractedFrameCount;
                }
            }

            return extractedFrameCount;
        }
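
A short caller sketch for ExtractFrames; the media and videoStream variables are placeholders for an already opened FFmpegMedia and one of its streams.

            // Hypothetical caller: request a small batch of decoded frames per update.
            var decoded = media.ExtractFrames(videoStream, 5);
            if (decoded < 5)
            {
                // Fewer frames than requested were produced; the decoder may
                // have reached the end of the stream.
            }
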
Example #5
        /// <summary>
        /// Opens this media.
        /// </summary>
        /// <remarks>
        /// Once the media is open, the collection of <see cref="Streams"/> is populated.
        /// </remarks>
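        /// <param name="url">The path or URL of the media to open.</param>
        /// <param name="startPosition">Optional byte offset at which the media starts inside the file; used together with <paramref name="length"/> to open an embedded segment via FFmpeg's subfile protocol.</param>
        /// <param name="length">Optional length of the segment in bytes; -1 means the whole file.</param>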
        public void Open(string url, long startPosition = 0, long length = -1)
        {
            FFmpegUtils.EnsurePlatformSupport();
            if (isDisposed)
            {
                throw new ObjectDisposedException(nameof(FFmpegMedia));
            }
            if (IsOpen)
            {
                // TODO: log?
                throw new InvalidOperationException(@"Media is already open.");
            }

            if (startPosition != 0 && length != -1)
            {
                url = $@"subfile,,start,{startPosition},end,{startPosition + length},,:{url}";
            }

            var pFormatContext = ffmpeg.avformat_alloc_context();
            var ret            = ffmpeg.avformat_open_input(&pFormatContext, url, null, null);

            if (ret < 0)
            {
                Logger.Error($"Could not open file. Error code={ret.ToString("X8")}");
                Logger.Error(GetErrorMessage(ret));
                throw new ApplicationException(@"Could not open file.");
            }

            ret = ffmpeg.avformat_find_stream_info(pFormatContext, null);
            if (ret < 0)
            {
                Logger.Error($"Could not find stream info. Error code={ret.ToString("X8")}");
                Logger.Error(GetErrorMessage(ret));
                throw new ApplicationException(@"Could not find stream info.");
            }

            AVFormatContext = pFormatContext;
            Duration        = TimeSpan.FromSeconds((double)AVFormatContext->duration / ffmpeg.AV_TIME_BASE);
            Url             = url;

            // Get the streams
            for (int i = 0; i < pFormatContext->nb_streams; i++)
            {
                var stream = FFmpegStream.Create(pFormatContext->streams[i], this);
                streams.Add(stream);
            }

            pDecodedFrame = ffmpeg.av_frame_alloc();
            if (pDecodedFrame == null)
            {
                throw new ApplicationException("Couldn't allocate a frame for decoding.");
            }

            pCpuCopyFrame = ffmpeg.av_frame_alloc();
            if (pCpuCopyFrame == null)
            {
                throw new ApplicationException("Couldn't allocate a frame for hardware decoding.");
            }

            // dispose cached video image from previous file
            ClearStreamInfo();
        }
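
A usage sketch for Open, including an embedded-segment path handled by the subfile URL built above; the file name and offsets are placeholders, and the parameterless FFmpegMedia constructor is an assumption.

        // Hypothetical usage: open a media segment embedded inside a larger file.
        var media = new FFmpegMedia();
        media.Open("bundle.dat", startPosition: 1024, length: 4096000);
        foreach (var stream in media.Streams)
        {
            // Each stream exposes Codec, Index and Metadata (see Example #3).
        }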