/// <summary>
/// Initializes a new instance of the <see cref="AVFormatContext"/> class.
/// </summary>
/// <param name="client">
/// An implementation of the <see cref="FFmpegClient"/> which provides access to the native FFmpeg functions.
/// </param>
/// <param name="ioContext">
/// The I/O context which enables reading and writing data using custom callbacks.
/// </param>
/// <param name="formatContextHandle">
/// A handle to the underlying native <c>AVFormatContext</c> this instance wraps.
/// </param>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="client"/>, <paramref name="ioContext"/> or
/// <paramref name="formatContextHandle"/> is <see langword="null"/>.
/// </exception>
public AVFormatContext(FFmpegClient client, AVIOContext ioContext, AVFormatContextHandle formatContextHandle)
{
    // Fail fast with a clear exception instead of a NullReferenceException
    // when dereferencing ioContext.NativeObject below.
    this.client = client ?? throw new ArgumentNullException(nameof(client));
    this.handle = formatContextHandle ?? throw new ArgumentNullException(nameof(formatContextHandle));
    this.ioContext = ioContext ?? throw new ArgumentNullException(nameof(ioContext));

    // Wire the custom I/O context into the native format context so FFmpeg
    // reads and writes through the managed callbacks.
    this.NativeObject->pb = ioContext.NativeObject;
}
/// <summary>
/// Initializes a new instance of the <see cref="AVFormatContext"/> class,
/// allocating a fresh native format context via <see cref="FFmpegClient"/>.
/// </summary>
/// <param name="client">
/// An implementation of the <see cref="FFmpegClient"/> which provides access to the native FFmpeg functions.
/// </param>
/// <param name="ioContext">
/// The I/O context which enables reading and writing data using custom callbacks.
/// </param>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="client"/> or <paramref name="ioContext"/> is <see langword="null"/>.
/// </exception>
public AVFormatContext(FFmpegClient client, AVIOContext ioContext)
    : this(
        // Guard inside the chained-constructor arguments (arguments are
        // evaluated left to right), so a null client throws an
        // ArgumentNullException here rather than a NullReferenceException
        // inside client.AllocAVFormatContext().
        client ?? throw new ArgumentNullException(nameof(client)),
        ioContext ?? throw new ArgumentNullException(nameof(ioContext)),
        new AVFormatContextHandle(client, client.AllocAVFormatContext()))
{
}
/// <summary>
/// Decodes the current stream and blocks until decoding is done.
/// </summary>
/// <param name="cancellationToken">
/// A <see cref="CancellationToken"/> which can be used to cancel the (blocking)
/// decode loop. Cancellation is observed between packets.
/// </param>
/// <exception cref="ObjectDisposedException">
/// Thrown when this decoder has already been disposed of.
/// </exception>
/// <exception cref="OperationCanceledException">
/// Thrown when <paramref name="cancellationToken"/> is cancelled while decoding.
/// </exception>
public virtual unsafe void Decode(CancellationToken cancellationToken = default)
{
    if (this.disposed)
    {
        throw new ObjectDisposedException(nameof(H264Decoder));
    }

    // Keep a strong reference to the delegate for the whole decode loop
    // (see GC.KeepAlive below); FFmpeg only holds the raw function pointer,
    // which does not keep the managed delegate alive.
    var readDelegate = new NativeReadPacket(this.Read);
    var readFunc = new NativeReadPacketFunc()
    {
        Pointer = Marshal.GetFunctionPointerForDelegate(readDelegate),
    };

    using (var ioContext = new AVIOContext(this.client, ReadBufferSize, readFunc, null))
    using (var formatContext = new AVFormatContext(this.client, ioContext))
    {
        formatContext.OpenInputStream("h264");
        var stream = formatContext.GetVideoStream();

        using (var codec = new AVCodec(this.client, stream))
        using (var frame = new AVFrame(this.client))
        using (var packet = new AVPacket(this.client))
        {
            while (packet.ReadFrame(formatContext))
            {
                // Honor the caller's token; the previous implementation
                // accepted it but never checked it, making cancellation a no-op.
                cancellationToken.ThrowIfCancellationRequested();

                this.logger.LogDebug($"Got a frame for stream {packet.StreamIndex}");

                // Skip packets which do not belong to the selected video stream.
                if (packet.NativeObject->stream_index != stream.Index)
                {
                    continue;
                }

                this.logger.LogDebug("Sending packet");
                codec.SendPacket(packet);

                // One packet may yield zero or more decoded frames.
                int framesInPacket = 0;

                while (codec.ReceiveFrame(frame))
                {
                    this.logger.LogDebug("Receiving frame");
                    framesInPacket += 1;

                    if (frame.PictureType != NativeAVPictureType.AV_PICTURE_TYPE_NONE)
                    {
                        this.logger.LogDebug($"Got a picture of {frame.Width}x{frame.Height} in color space {frame.Format}");

                        // decode frame
                        // NOTE(review): each span's length is a single linesize
                        // (one row), not linesize * height (the full plane) —
                        // confirm DecompresFrame really expects per-row spans.
                        this.FrameBuffer.DecompresFrame(
                            frame.Width,
                            frame.Height,
                            frame.Width,
                            frame.Height,
                            frame.Width * 4,
                            new Span<byte>(frame.NativeObject->data[0], frame.NativeObject->linesize[0]),
                            new Span<byte>(frame.NativeObject->data[1], frame.NativeObject->linesize[1]),
                            new Span<byte>(frame.NativeObject->data[2], frame.NativeObject->linesize[2]),
                            new int[] { frame.NativeObject->linesize[0], frame.NativeObject->linesize[1], frame.NativeObject->linesize[2] });
                    }
                }

                this.logger.LogInformation($"Add {framesInPacket} frames in packet.");
            }
        }
    }

    // Prevent the GC from collecting the delegate while native code may
    // still invoke the read callback through its function pointer.
    GC.KeepAlive(readDelegate);
}