/// <summary>
/// Wraps the native FFmpeg subtitle referenced by <paramref name="framePointer"/>
/// in a managed <see cref="SubtitleFrame"/>.
/// </summary>
/// <param name="framePointer">The raw FFmpeg subtitle pointer.</param>
/// <returns>The managed frame</returns>
protected override unsafe MediaFrame CreateFrameSource(IntPtr framePointer) =>
    new SubtitleFrame((AVSubtitle*)framePointer, this);
/// <summary>
/// Receives 0 or more frames from the next available packet in the Queue.
/// This sends the first available packet to dequeue to the decoder
/// and uses the decoded frames (if any) to their corresponding
/// ProcessFrame method.
/// </summary>
/// <returns>The list of frames</returns>
private List<MediaFrame> DecodeNextPacketInternal()
{
    var result = new List<MediaFrame>();

    // Nothing to decode if there is no packet waiting in the queue.
    if (PacketBufferCount <= 0)
    {
        return (result);
    }

    // Setup some initial state variables
    var packet = Packets.Dequeue();

    // The packets are always sent. We dequeue them and keep a reference to them
    // in the SentPackets queue so they can be released once frames are produced.
    SentPackets.Push(packet);

    var receiveFrameResult = 0;

    if (MediaType == MediaType.Audio || MediaType == MediaType.Video)
    {
        // If it's audio or video, we use the new API and the decoded frames are stored in AVFrame.
        // Let us send the packet to the codec for decoding a frame of uncompressed data later.
        // A null packet tells the decoder to drain any buffered frames.
        // TODO: sendPacketResult is never checked for errors... We require some error handling;
        // for example when using the h264_qsv codec, this returns -40 (Function not implemented).
        var sendPacketResult = ffmpeg.avcodec_send_packet(CodecContext, IsEmptyPacket(packet) ? null : packet);

        // Let's check and see if we can get 1 or more frames from the packet we just sent to the decoder.
        // Audio packets will typically contain 1 or more audio frames;
        // video packets might require several packets to decode 1 frame.
        MediaFrame managedFrame = null;
        while (receiveFrameResult == 0)
        {
            // Allocate a frame in unmanaged memory and
            // try to receive the decompressed frame data.
            var outputFrame = ffmpeg.av_frame_alloc();
            RC.Current.Add(outputFrame, $"327: {nameof(MediaComponent)}[{MediaType}].{nameof(DecodeNextPacketInternal)}()");
            receiveFrameResult = ffmpeg.avcodec_receive_frame(CodecContext, outputFrame);

            try
            {
                managedFrame = null;
                if (receiveFrameResult == 0)
                {
                    // Send the frame to processing.
                    // NOTE(review): CreateFrameSource takes the frame by ref — presumably the
                    // override may take ownership of (or replace) the pointer; confirm against overrides.
                    managedFrame = CreateFrameSource(ref outputFrame);
                    if (managedFrame != null)
                    {
                        result.Add(managedFrame);
                    }
                }

                // No managed wrapper took ownership of the native frame: release it here.
                if (managedFrame == null)
                {
                    RC.Current.Remove(outputFrame);
                    ffmpeg.av_frame_free(&outputFrame);
                }
            }
            catch
            {
                // Release the frame as the decoded data could not be processed.
                RC.Current.Remove(outputFrame);
                ffmpeg.av_frame_free(&outputFrame);
                throw;
            }
        }
    }
    else if (MediaType == MediaType.Subtitle)
    {
        // For subtitles we use the old API; the new API (send_packet/receive_frame) is not yet available.
        var gotFrame = 0;
        var outputFrame = SubtitleFrame.AllocateSubtitle();
        receiveFrameResult = ffmpeg.avcodec_decode_subtitle2(CodecContext, outputFrame, &gotFrame, packet);

        // Check if there is an error decoding the packet.
        // If there is, deallocate the frame and clear the sent packets.
        if (receiveFrameResult < 0)
        {
            SubtitleFrame.DeallocateSubtitle(outputFrame);
            SentPackets.Clear();
            Container.Parent?.Log(MediaLogMessageType.Error, $"{MediaType}: Error decoding. Error Code: {receiveFrameResult}");
        }
        else
        {
            // Process the first frame if we got it from the packet.
            // Note that there could be more frames (subtitles) in the packet.
            if (gotFrame != 0)
            {
                try
                {
                    // Send the frame to processing.
                    var managedFrame = CreateFrameSource(outputFrame);
                    if (managedFrame == null)
                    {
                        throw new MediaContainerException($"{MediaType} Component does not implement {nameof(CreateFrameSource)}");
                    }

                    result.Add(managedFrame);
                }
                catch
                {
                    // The frame could not be processed; release its unmanaged memory.
                    SubtitleFrame.DeallocateSubtitle(outputFrame);
                    throw;
                }
            }

            // Let's check if we have more decoded frames from the same single packet.
            // Packets may contain more than 1 frame and the decoder is drained
            // by passing an empty packet (data = null, size = 0).
            while (gotFrame != 0 && receiveFrameResult > 0)
            {
                outputFrame = SubtitleFrame.AllocateSubtitle();
                var emptyPacket = ffmpeg.av_packet_alloc();
                RC.Current.Add(emptyPacket, $"406: {nameof(MediaComponent)}[{MediaType}].{nameof(DecodeNextPacketInternal)}()");

                // Receive the frames in a loop.
                try
                {
                    receiveFrameResult = ffmpeg.avcodec_decode_subtitle2(CodecContext, outputFrame, &gotFrame, emptyPacket);
                    if (gotFrame != 0 && receiveFrameResult > 0)
                    {
                        // Send the subtitle to processing.
                        var managedFrame = CreateFrameSource(outputFrame);
                        if (managedFrame == null)
                        {
                            throw new MediaContainerException($"{MediaType} Component does not implement {nameof(CreateFrameSource)}");
                        }

                        result.Add(managedFrame);
                    }
                }
                catch
                {
                    // The subtitle could not be processed; release it from memory.
                    SubtitleFrame.DeallocateSubtitle(outputFrame);
                    throw;
                }
                finally
                {
                    // Free the empty (drain) packet regardless of the outcome.
                    RC.Current.Remove(emptyPacket);
                    ffmpeg.av_packet_free(&emptyPacket);
                }
            }
        }
    }

    // Release the sent packets if 1 or more frames were received in the packet
    // (or the stream has ended and the drain packet produced nothing more).
    if (result.Count >= 1 || (Container.IsAtEndOfStream && IsEmptyPacket(packet) && PacketBufferCount == 0))
    {
        // We clear the sent packet queue (releasing packet from unmanaged memory also)
        // because we got at least 1 frame from the packet.
        SentPackets.Clear();
    }

    return (result);
}
/// <summary>
/// Builds the managed <see cref="SubtitleFrame"/> wrapper for the given native subtitle.
/// </summary>
/// <param name="frame">The raw FFmpeg subtitle pointer.</param>
/// <returns>The managed frame</returns>
protected override unsafe MediaFrame CreateFrameSource(AVSubtitle* frame)
    => new SubtitleFrame(frame, this);