/// <summary>
/// Sends a packet of compressed data to the decoder for this context.
/// </summary>
/// <param name="packet">
/// The packet to send.
/// </param>
/// <exception cref="InvalidOperationException">
/// Thrown when the codec has not been opened via avcodec_open2, or when the codec
/// for this context is not a decoder.
/// </exception>
public void SendPacket(AVPacket packet)
{
    if (!this.client.IsCodecOpen((IntPtr)this.context))
    {
        throw new InvalidOperationException("You must first open the codec for this context, via avcodec_open2.");
    }

    // NOTE(review): the open check above passes this.context while this check passes
    // this.native — presumably the codec itself vs. its context; confirm both
    // pointers are intentional.
    if (!this.client.IsDecoder((IntPtr)this.native))
    {
        throw new InvalidOperationException("The codec for this context is not a decoder. You cannot send packets.");
    }

    // Declare at first use instead of pre-initializing to 0 far from the assignment.
    int ret = this.client.SendPacket((IntPtr)this.context, (IntPtr)packet.NativeObject);
    this.client.ThrowOnAVError(ret);
}
/// <summary>
/// Wipes the packet, releasing any buffers it references.
/// </summary>
/// <param name="packet">
/// The packet to unreference.
/// </param>
public virtual void UnrefPacket(AVPacket packet)
    => ffmpeg.av_packet_unref(packet.NativeObject);
/// <summary>
/// Reads the next frame of a stream into <paramref name="packet"/>.
/// </summary>
/// <param name="context">
/// The format context to read from.
/// </param>
/// <param name="packet">
/// The packet which receives the frame data.
/// </param>
/// <returns>
/// 0 if OK, negative on error or end of file.
/// </returns>
public virtual int ReadFrame(AVFormatContext context, AVPacket packet)
    => ffmpeg.av_read_frame(context.NativeObject, packet.NativeObject);
/// <summary>
/// Decodes the current stream and blocks until decoding is done.
/// </summary>
/// <param name="cancellationToken">
/// A <see cref="CancellationToken"/> which can be used to cancel the asynchronous operation.
/// </param>
/// <exception cref="ObjectDisposedException">
/// Thrown when this decoder has been disposed.
/// </exception>
/// <exception cref="OperationCanceledException">
/// Thrown when <paramref name="cancellationToken"/> is canceled.
/// </exception>
public virtual unsafe void Decode(CancellationToken cancellationToken = default)
{
    if (this.disposed)
    {
        throw new ObjectDisposedException(nameof(H264Decoder));
    }

    // Keep a strong reference to the delegate so the GC does not collect it while
    // native code still holds the function pointer; see GC.KeepAlive at the end.
    var readDelegate = new NativeReadPacket(this.Read);
    var readFunc = new NativeReadPacketFunc()
    {
        Pointer = Marshal.GetFunctionPointerForDelegate(readDelegate),
    };

    using (var ioContext = new AVIOContext(this.client, ReadBufferSize, readFunc, null))
    using (var formatContext = new AVFormatContext(this.client, ioContext))
    {
        formatContext.OpenInputStream("h264");
        var stream = formatContext.GetVideoStream();

        using (var codec = new AVCodec(this.client, stream))
        using (var frame = new AVFrame(this.client))
        using (var packet = new AVPacket(this.client))
        {
            int frameNumber = 0;

            while (packet.ReadFrame(formatContext))
            {
                // Fix: honor the cancellation token. It was previously accepted and
                // documented but never observed, so Decode could not be cancelled.
                cancellationToken.ThrowIfCancellationRequested();

                this.logger.LogDebug($"Got a frame for stream {packet.StreamIndex}");

                // Skip packets which do not belong to the selected video stream.
                if (packet.NativeObject->stream_index != stream.Index)
                {
                    continue;
                }

                this.logger.LogDebug("Sending packet");
                codec.SendPacket(packet);

                int framesInPacket = 0;

                // A single packet may yield zero or more decoded frames.
                while (codec.ReceiveFrame(frame))
                {
                    this.logger.LogDebug("Receiving frame");
                    framesInPacket += 1;

                    if (frame.PictureType != NativeAVPictureType.AV_PICTURE_TYPE_NONE)
                    {
                        this.logger.LogDebug($"Got a picture of {frame.Width}x{frame.Height} in color space {frame.Format}");

                        // decode frame
                        // NOTE(review): each span is sized as linesize[i] (one row) per
                        // plane — presumably DecompresFrame only needs the plane base
                        // pointers plus the stride array; confirm it does not read
                        // beyond one row through the spans.
                        this.FrameBuffer.DecompresFrame(
                            frame.Width,
                            frame.Height,
                            frame.Width,
                            frame.Height,
                            frame.Width * 4,
                            new Span<byte>(frame.NativeObject->data[0], frame.NativeObject->linesize[0]),
                            new Span<byte>(frame.NativeObject->data[1], frame.NativeObject->linesize[1]),
                            new Span<byte>(frame.NativeObject->data[2], frame.NativeObject->linesize[2]),
                            new int[] { frame.NativeObject->linesize[0], frame.NativeObject->linesize[1], frame.NativeObject->linesize[2] });
                    }
                }

                this.logger.LogInformation($"Add {framesInPacket} frames in packet.");
                frameNumber++;
            }
        }
    }

    // Must come after all native I/O: keeps readDelegate alive so its function
    // pointer stays valid for the duration of the decode loop.
    GC.KeepAlive(readDelegate);
}