/// <summary>
/// Initializes a new instance of the <see cref="AVIOContext"/> class.
/// </summary>
/// <param name="ffmpeg">
/// An implementation of the <see cref="FFmpegClient"/> interface which provides access to the
/// native FFmpeg functions.
/// </param>
/// <param name="bufferSize">
/// The buffer size, in bytes. Must be greater than zero and no larger than <see cref="int.MaxValue"/>.
/// </param>
/// <param name="read_packet">
/// A function for refilling the buffer, may be NULL. For stream protocols, must never return 0 but rather a proper AVERROR code.
/// </param>
/// <param name="write_packet">
/// A function for writing the buffer contents, may be NULL. The function may not change the input buffers content.
/// </param>
/// <exception cref="ArgumentNullException">
/// <paramref name="ffmpeg"/> is <see langword="null"/>.
/// </exception>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="bufferSize"/> is zero or exceeds <see cref="int.MaxValue"/>.
/// </exception>
public AVIOContext(FFmpegClient ffmpeg, ulong bufferSize, NativeReadPacketFunc read_packet, NativeWritePacketFunc? write_packet)
{
    if (ffmpeg == null)
    {
        throw new ArgumentNullException(nameof(ffmpeg));
    }

    // The native API takes an int; an unchecked cast of a larger ulong would
    // silently produce a negative buffer size.
    if (bufferSize == 0 || bufferSize > int.MaxValue)
    {
        throw new ArgumentOutOfRangeException(nameof(bufferSize), bufferSize, $"The buffer size must be between 1 and {int.MaxValue} bytes.");
    }

    this.ffmpeg = ffmpeg;

    void* memory = ffmpeg.AllocMemory(bufferSize);

    // ownsHandle: false — the AVIO context takes ownership of the buffer memory.
    var memoryHandle = new AVMemoryHandle(ffmpeg, memory, false);

    var avio_ctx = ffmpeg.AllocAVIOContext(
        (byte*)memory,
        (int)bufferSize,
        write_packet == null ? 0 : 1, // write_flag: non-zero when the context is writable
        (void*)IntPtr.Zero,           // opaque user data: none
        read_packet,
        write_packet.GetValueOrDefault(),
        null);                        // no seek callback

    this.buffer = memoryHandle;
    this.handle = new AVIOContextHandle(ffmpeg, avio_ctx);
}
/// <summary>
/// Decodes the current stream and blocks until decoding is done.
/// </summary>
/// <param name="cancellationToken">
/// A <see cref="CancellationToken"/> which can be used to cancel the asynchronous operation.
/// </param>
/// <exception cref="ObjectDisposedException">
/// The decoder has been disposed.
/// </exception>
/// <exception cref="OperationCanceledException">
/// <paramref name="cancellationToken"/> was cancelled.
/// </exception>
public virtual unsafe void Decode(CancellationToken cancellationToken = default)
{
    if (this.disposed)
    {
        throw new ObjectDisposedException(nameof(H264Decoder));
    }

    // Keep a strong reference to the delegate for the lifetime of the native
    // callback; otherwise the GC may collect it while FFmpeg still holds the
    // function pointer (see GC.KeepAlive at the end of this method).
    var readDelegate = new NativeReadPacket(this.Read);
    var readFunc = new NativeReadPacketFunc()
    {
        Pointer = Marshal.GetFunctionPointerForDelegate(readDelegate),
    };

    using (var ioContext = new AVIOContext(this.client, ReadBufferSize, readFunc, null))
    using (var formatContext = new AVFormatContext(this.client, ioContext))
    {
        formatContext.OpenInputStream("h264");

        var stream = formatContext.GetVideoStream();

        using (var codec = new AVCodec(this.client, stream))
        using (var frame = new AVFrame(this.client))
        using (var packet = new AVPacket(this.client))
        {
            int frameNumber = 0;

            while (packet.ReadFrame(formatContext))
            {
                // Fix: honor the cancellation token. Previously the parameter
                // was accepted but never consulted, so Decode could not be
                // cancelled as documented.
                cancellationToken.ThrowIfCancellationRequested();

                this.logger.LogDebug($"Got a frame for stream {packet.StreamIndex}");

                // Skip packets which do not belong to the selected video stream.
                if (packet.NativeObject->stream_index != stream.Index)
                {
                    continue;
                }

                this.logger.LogDebug("Sending packet");
                codec.SendPacket(packet);

                int framesInPacket = 0;

                while (codec.ReceiveFrame(frame))
                {
                    this.logger.LogDebug("Receiving frame");
                    framesInPacket += 1;

                    if (frame.PictureType != NativeAVPictureType.AV_PICTURE_TYPE_NONE)
                    {
                        this.logger.LogDebug($"Got a picture of {frame.Width}x{frame.Height} in color space {frame.Format}");

                        // decode frame
                        this.FrameBuffer.DecompresFrame(
                            frame.Width,
                            frame.Height,
                            frame.Width,
                            frame.Height,
                            frame.Width * 4,
                            new Span<byte>(frame.NativeObject->data[0], frame.NativeObject->linesize[0]),
                            new Span<byte>(frame.NativeObject->data[1], frame.NativeObject->linesize[1]),
                            new Span<byte>(frame.NativeObject->data[2], frame.NativeObject->linesize[2]),
                            new int[] { frame.NativeObject->linesize[0], frame.NativeObject->linesize[1], frame.NativeObject->linesize[2] });
                    }
                }

                this.logger.LogInformation($"Add {framesInPacket} frames in packet.");
                frameNumber++;
            }
        }
    }

    // Prevent the GC from collecting the read delegate while native code may
    // still invoke it through the stored function pointer.
    GC.KeepAlive(readDelegate);
}