/// <summary>
/// Receives decoded output data from a decoder.
/// </summary>
/// <param name="frame">
/// The frame.
/// </param>
/// <returns>
/// A value indicating whether a frame was received.
/// </returns>
public bool ReceiveFrame(AVFrame frame)
{
    int status = this.client.ReceiveFrame(this.context, frame);

    // No frame is available yet (EAGAIN) or the stream has been fully drained (EOF);
    // neither is an error from the caller's point of view.
    bool drained = (UnixError)(-status) == UnixError.EAGAIN
        || (AVError)status == AVError.EndOfFile;

    if (drained)
    {
        return false;
    }

    // Any other negative status is a genuine decoder error.
    this.client.ThrowOnAVError(status);
    return true;
}
/// <summary>
/// Unreferences all the buffers referenced by <paramref name="frame"/> and resets the frame fields.
/// </summary>
/// <param name="frame">
/// The frame.
/// </param>
public virtual void UnrefFrame(AVFrame frame) => ffmpeg.av_frame_unref(frame.NativeObject);
/// <summary>
/// Returns decoded output data from a decoder.
/// </summary>
/// <param name="context">
/// The codec context.
/// </param>
/// <param name="frame">
/// This will be set to a reference-counted video or audio frame.
/// </param>
/// <returns>
/// 0: success, a frame was returned. AVERROR(EAGAIN): output is not available in this state.
/// </returns>
public int ReceiveFrame(AVCodecContext* context, AVFrame frame)
    => ffmpeg.avcodec_receive_frame(context, frame.NativeObject);
/// <summary>
/// Decodes the current stream and blocks until decoding is done.
/// </summary>
/// <param name="cancellationToken">
/// A <see cref="CancellationToken"/> which can be used to cancel the asynchronous operation.
/// </param>
/// <exception cref="ObjectDisposedException">
/// Thrown when this decoder has already been disposed.
/// </exception>
/// <exception cref="OperationCanceledException">
/// Thrown when <paramref name="cancellationToken"/> is cancelled while decoding.
/// </exception>
public virtual unsafe void Decode(CancellationToken cancellationToken = default)
{
    if (this.disposed)
    {
        throw new ObjectDisposedException(nameof(H264Decoder));
    }

    // The delegate must stay alive for as long as native code may invoke
    // the function pointer; see the GC.KeepAlive at the bottom.
    var readDelegate = new NativeReadPacket(this.Read);
    var readFunc = new NativeReadPacketFunc()
    {
        Pointer = Marshal.GetFunctionPointerForDelegate(readDelegate),
    };

    using (var ioContext = new AVIOContext(this.client, ReadBufferSize, readFunc, null))
    using (var formatContext = new AVFormatContext(this.client, ioContext))
    {
        formatContext.OpenInputStream("h264");
        var stream = formatContext.GetVideoStream();

        using (var codec = new AVCodec(this.client, stream))
        using (var frame = new AVFrame(this.client))
        using (var packet = new AVPacket(this.client))
        {
            int frameNumber = 0;

            while (packet.ReadFrame(formatContext))
            {
                // Fix: the token was previously accepted but never observed,
                // making cancellation a silent no-op. Check once per packet.
                cancellationToken.ThrowIfCancellationRequested();

                this.logger.LogDebug($"Got a frame for stream {packet.StreamIndex}");

                // Skip packets that do not belong to the selected video stream.
                if (packet.NativeObject->stream_index != stream.Index)
                {
                    continue;
                }

                this.logger.LogDebug("Sending packet");
                codec.SendPacket(packet);

                int framesInPacket = 0;

                // A single packet may yield zero or more frames; drain the decoder.
                while (codec.ReceiveFrame(frame))
                {
                    this.logger.LogDebug("Receiving frame");
                    framesInPacket += 1;

                    if (frame.PictureType != NativeAVPictureType.AV_PICTURE_TYPE_NONE)
                    {
                        this.logger.LogDebug($"Got a picture of {frame.Width}x{frame.Height} in color space {frame.Format}");

                        // decode frame
                        this.FrameBuffer.DecompresFrame(
                            frame.Width,
                            frame.Height,
                            frame.Width,
                            frame.Height,
                            frame.Width * 4,
                            new Span<byte>(frame.NativeObject->data[0], frame.NativeObject->linesize[0]),
                            new Span<byte>(frame.NativeObject->data[1], frame.NativeObject->linesize[1]),
                            new Span<byte>(frame.NativeObject->data[2], frame.NativeObject->linesize[2]),
                            new int[] { frame.NativeObject->linesize[0], frame.NativeObject->linesize[1], frame.NativeObject->linesize[2] });
                    }
                }

                this.logger.LogInformation($"Add {framesInPacket} frames in packet.");
                frameNumber++;
            }
        }
    }

    // Prevent the GC from collecting the delegate while native code could
    // still call through the marshalled function pointer.
    GC.KeepAlive(readDelegate);
}